ngram
listlengths
0
67.8k
[ "Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None last_name: Optional[str] = None", "pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None", "Optional[str] = None last_name: Optional[str] = None user_id: Optional[int] = None vcard: Optional[str]", "class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None last_name: Optional[str] =", "None first_name: Optional[str] = None last_name: Optional[str] = None user_id: Optional[int] = None", "first_name: Optional[str] = None last_name: Optional[str] = None user_id: Optional[int] = None vcard:", "Optional from pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str]", "BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None last_name: Optional[str]", "None last_name: Optional[str] = None user_id: Optional[int] = None vcard: Optional[str] = None", "phone_number: Optional[str] = None first_name: Optional[str] = None last_name: Optional[str] = None user_id:", "from pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] =", "= None first_name: Optional[str] = None last_name: Optional[str] = None user_id: Optional[int] =", "= None last_name: Optional[str] = None user_id: Optional[int] = None vcard: Optional[str] =", "import Optional from pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name:", "Optional[str] = None first_name: Optional[str] = None last_name: Optional[str] = None user_id: Optional[int]", "from typing import Optional from pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] =", "import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None last_name:", "typing import Optional from pydantic import 
BaseModel class Contact(BaseModel): phone_number: Optional[str] = None" ]
[ "you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you define a function you", "a function you give it what we call parameters # When you are", "{}, we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') #", "'🍀') # When you define a function you give it what we call", "define a function you give it what we call parameters # When you", "we call parameters # When you are invoking the function you pass to", "emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you define a function you give", "def greet_user(username, emoji): print(\"Welcome {}, we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\",", "print(\"Welcome {}, we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀')", "are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you", "# When you define a function you give it what we call parameters", "it what we call parameters # When you are invoking the function you", "we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When", "to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you define a", "\"🙋\") greet_user(\"kpodo\", '🍀') # When you define a function you give it what", "greet_user(\"kpodo\", '🍀') # When you define a function you give it what we", "emoji): print(\"Welcome {}, we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\",", "When you define a function you give it what we call parameters #", "you define a function you give it what we call parameters # When", "call parameters # When you are invoking the function you pass to it", "happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you 
define", "parameters # When you are invoking the function you pass to it arguments", "what we call parameters # When you are invoking the function you pass", "greet_user(username, emoji): print(\"Welcome {}, we are happy to have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\")", "function you give it what we call parameters # When you are invoking", "have you{}\".format(username, emoji)) greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you define a function", "greet_user(\"narh\", \"🙋\") greet_user(\"kpodo\", '🍀') # When you define a function you give it", "give it what we call parameters # When you are invoking the function", "you give it what we call parameters # When you are invoking the" ]
[ "that a beer which is active on vinmonopolet does not get deactivated. \"\"\"", "assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which", "beer which is active on vinmonopolet does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502,", "beer which is no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\",", "@pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is active on vinmonopolet", "get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer", "<reponame>haavardnk/Vinmonopolet-x-Untappd<filename>api/beers/tests/test_deactivate_inactive.py import pytest from django.utils import timezone from datetime import timedelta from beers.models", "import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer which is no", "import timedelta from beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer():", "that a beer which is no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create(", "from datetime import timedelta from beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db", "which is no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True,", "beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer which is", "is active on vinmonopolet does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True,", "Test that a beer which is no longer on vinmonopolet gets deactivated. \"\"\"", "vinmonopolet gets deactivated. 
\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30)", "a beer which is active on vinmonopolet does not get deactivated. \"\"\" Beer.objects.create(", "timezone from datetime import timedelta from beers.models import Beer from beers.tasks import deactivate_inactive", "- timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def", "on vinmonopolet does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() -", "beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is", "does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), )", "from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer which", "import pytest from django.utils import timezone from datetime import timedelta from beers.models import", "django.utils import timezone from datetime import timedelta from beers.models import Beer from beers.tasks", "deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer which is no longer", "import timezone from datetime import timedelta from beers.models import Beer from beers.tasks import", "beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that", "Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer", "import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that 
a", "def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is active on vinmonopolet does", "no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() -", "vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active ==", "deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test", ") deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\"", "is no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now()", "vinmonopolet does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29),", "timedelta from beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\"", "vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active ==", "vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active", "Test that a beer which is active on vinmonopolet does not get deactivated.", "which is active on vinmonopolet does not get deactivated. 
\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\",", "active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == True", "a beer which is no longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502,", "active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False", "\"\"\" Test that a beer which is active on vinmonopolet does not get", "\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502)", "datetime import timedelta from beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def", "def test_deactivate_inactive_beer(): \"\"\" Test that a beer which is no longer on vinmonopolet", "longer on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31),", "on vinmonopolet gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), )", "vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db", "= Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a", "test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is active on vinmonopolet does not", "\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502)", "not get deactivated. 
\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30)", "\"\"\" Test that a beer which is no longer on vinmonopolet gets deactivated.", "== False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is active", "gets deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer", "timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active == False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated():", "vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert beer.active", "Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert", "deactivated. 
\"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=31), ) deactivate_inactive(30) beer =", "from django.utils import timezone from datetime import timedelta from beers.models import Beer from", "Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer", "beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that", "Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer = Beer.objects.get(vmp_id=12611502) assert", "pytest from django.utils import timezone from datetime import timedelta from beers.models import Beer", "@pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test that a beer which is no longer on", "test_deactivate_inactive_beer(): \"\"\" Test that a beer which is no longer on vinmonopolet gets", "active on vinmonopolet does not get deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now()", "False @pytest.mark.django_db def test_active_beer_does_not_get_deactivated(): \"\"\" Test that a beer which is active on", "from beers.models import Beer from beers.tasks import deactivate_inactive @pytest.mark.django_db def test_deactivate_inactive_beer(): \"\"\" Test", "deactivated. \"\"\" Beer.objects.create( vmp_id=12611502, vmp_name=\"<NAME>\", active=True, vmp_updated=timezone.now() - timedelta(days=29), ) deactivate_inactive(30) beer =" ]
[ "+ a) * (1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor", "#self.tw *= factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb", "{:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt'", "properties of a Hollow Semiellipse with constant wall thickness Tw. The midthickness perimeter", "0.2 < a/b < 0.50 Parameters ---------- d : Section Heigh b :", "\"\"\" Calculate the superellipse cross section properties Superellipses as a function of the", "_tmax _DD = max(_DD , 0.20) _DD = min(_DD , 1.0) if _a", "_Iz = 0.03843 * r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return _Area,", "/ _tmax _DD = max(_DD , 0.20) _DD = min(_DD , 1.0) if", "self.q) / (self.p * self.q))) / (math.gamma((2 * self.p + self.p * self.q", "_b))**2))) self.Iy = _Iy - self.area * self.Zc**2 _K2 = 0.1349 + 0.1279", "radii of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area)", "self.q + self.q) / (self.p * self.q))))) # Second Moment of Area about", "from: 1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\" # Area _Area =", "+ A2 * (_Yc - b2 + Yc2)**2)) _Iyy = Iy1 + Iy2", "Elastic modulus about minor axis rz : Radius of gyration about minor Axis", ": Mayor Axis b : Minor Axis p : q : Returns ----------", "Ic1, Iy1, b2 = 0, A2 = 0, Yc2 = 0, Ic2 =", "and q Parameters ---------- a : Mayor Axis b : Minor Axis p", "add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the circular and elliptical segments", "HollowSemiellipse: \"\"\" Calculate the section properties of a Hollow Semiellipse with constant wall", "Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n", "self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 * self.p + self.p", "modulus about minor 
axis SFz : Shape factor minor axis rz : Radius", "self.tw / _tmax _DD = max(_DD , 0.20) _DD = min(_DD , 1.0)", "# Elastic Modulus about Mayor Axis # -------------------------------------- K4 = 0.1835 + 0.895", "0.002222 * ((_a / _b) + (_b / _a)) _K2 = 1 -", "---------- Uses formulas from: 1.- Geometric properties for the design of unusual member", "maximum # wall thickness allowed in this case. # Cusps will form in", "self.units_in[0]: pass else: print('error length unit must be provided') print(' program aborted') sys.exit()", "segments cross section properties Parameters ---------- a : Mayor Axis b : Minor", "= units.units_module(_unit, value, _units_in) # def get_property(self): # if self.units_in[0]: _units_input = self.units_in", "* _DD # elif _a / _b >= 1.0 and _a / _b", "self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # # #return", "_Ixx, _Sx, _rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a, b, t): \"\"\"", "-------------------------------------- K4 = 0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2 Zey", "ellipse 0.2 < a/b < 0.50 Parameters ---------- d : Section Heigh b", "{:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout =", "Iy = (math.pi * t * a**2 / 4.0 * (a + 3*b)", "4.0 * (b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2) + math.pi", "self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area,", "\\n gravity : [default : 9.81ms^2]\\n ------ units [length, mass, time, temperature, force,", "_b + _a)) * (1 + _K3 * ((_b - _a) / (_b", "area about minor axis Zez : Elastic modulus about minor axis rz :", "the bottom # to the plastic neutral axis _DD = self.tw / _tmax", "_C1 = 0.4829 + 0.0725 * _DD - 0.1815 * _DD**2 _C2 =", "_ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A) # return _A,", "provided') print(' program aborted') sys.exit() # def geometry(self, **kwargs): for key, 
value in", "section moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2) self.Zez = self.Iz", "x self.Iy = ((self.a * self.b**3 / 8.0) * (2 * _thetaG +", "* (a + 3*b) * (1 + K2 * ((a-b)/(a+b))**2) + math.pi *", "force : temperature : gravity : [default : 9.81ms^2] ------ units [length, mass,", "((_b - _a) / (_b + _a))**2))) # Elastic Modulus about Mayor Axis", "self.q + 3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4) #", "((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 * (3*a + b) * (1", "- 0.01284 * (a/b)**2 Iz = (math.pi * t * b**2 / 4.0", "= (_a - _b) / (_a + _b) _K1 = 0.2464 + 0.002222", "powers p and q Parameters ---------- a : Mayor Axis b : Minor", "_Zp = (_a * (_C1 + _C2 / (_a / _b) + _C3", "= 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a /", "+ K1 * ((a-b)/(a+b))**2) # Centroid Zc = a + t / 2.0", "+ _b) _K1 = 0.2464 + 0.002222 * ((_a / _b) + (_b", "self.tw * _b * (_b + 2 * _a)) * (1 + _K4", "Second moment of area about mayor axis Zey : Elastic modulus about mayor", "1.0 / self.q) * self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q)", ": Angle (degrees) Returns ---------- area: Section area Zc : Elastic neutral centre", "C self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The", "0 self.q = 0 self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n", "b) * (1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc = a +", "((_b - _a) / (_b + _a))**2)) + (((self.tw**3 * math.pi / 16.0)", "y self.Iz = ((self.a**3 * self.b / 24.0) * (6.0 * _thetaG -", "a hollow closed cross-section with finite thickness t, e.g. 
a tube, hollow rod,", "4.0 # # Centroid _Zc = 4 * r / (3 * math.pi)", "_Sy = min(_Iyy / _y1, _Iyy / _y2) _Sx = _Ixx / _x1", "[<NAME>] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build =", "_a**2 / _b else: _tmax = 2 * _b**2 / _a if self.tw", "of gyration about mayor Axis Iz : Second moment of area about minor", "check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout", "self.d *= factors[0] #self.tw *= factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b", "/ _y2) _Sx = _Ixx / _x1 # radii of gyration _ry =", "(1 + _K4 * ((_a - _b) / (_a + _b))**2)) + (((self.tw**3", "add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the circular and elliptical segments cross", "def hollow_ellipse(a, b, t): \"\"\" a b t \"\"\" # Area K1 =", "fibres _y1 = self.a _z1 = self.b - self.Zc _z2 = self.Zc #", "= a + t / 2.0 Yc = b + t / 2.0", "if self.units_in[0]: _units_input = self.units_in else: print(' ** error input units not provided')", "_DD**2 # _C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD", "self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy,", "= 90 self.p = float(p) self.q = float(q) self.type = 'Super Ellipse' def", "1.0 : _tmax = 2 * _a**2 / _b else: _tmax = 2", "(((self.tw**3 * math.pi / 16.0) * (3 * _b + _a)) * (1", "* _thetaG) * (3.0 + 2.0 * math.sin(_thetaG)**2))) # Second Moment of Area", "# -------------------------------------- K4 = 0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2", "the mayor axis if this # maximum is exceeded. if _a/_b < 1.0", "+ 0.0725 * _DD - 0.1815 * _DD**2 _C2 = 0.1957 - 0.6608", "with finite thickness t, e.g. a tube, hollow rod, pipe or cylindrical shell,", "strain and strucutral matrices [W.D. 
Pilkey] 2.- Roark's formulas for stress and strain", "about y self.Iz = ((2.0 * self.a**3 * self.b / self.p) * ((math.gamma(3.0", "- self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from the centroid", "0.0079 * _DD - 0.0565 * _DD**2 # _C5 = -0.0292 + 0.3749", "* ((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc, Yc, Iy, Zey, Iz,", "self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E}", "self.d - 0.50 * self.tw _b = self.b / 2.0 - 0.50 *", "self.Zc = ((2.0 * _a * _K2 / math.pi) + (self.tw**2 * _K3", "* t**3 / 16.0 * (3*b + a) * (1 + K3 *", "Examples ---------- \"\"\" # def __init__(self): # # Build [WELDED / ROLLED] self.build", "about minor axis SFz : Shape factor minor axis rz : Radius of", "'average' self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] def", "_b) / (_a + _b) _K1 = 0.2464 + 0.002222 * ((_a /", "self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt'", "self.area * self.Zc**2 # The distances from the centroid to the extreme fibres", "be combined together to make a hollow closed cross-section with finite thickness t,", "+ _a))**2))) # Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey = self.Iy", "Profiles Extension Open cross-sections which are extended to half of the circumference (thetaG", "= 0.0170 - 0.0079 * _DD - 0.0565 * _DD**2 # _C5 =", "time, temperature, force, pressure/stress] \"\"\" for key, value in kwargs.items(): _unit = units.find_unit_case(key)", "= (_a * (_C1 + _C2 / (_a / _b) + _C3 /", "* _DD**2 _C2 = 0.1957 - 0.6608 * _DD + 1.4222 * _DD**2", "math.pi) # Area self.area = self.a * self.b * _thetaG # Centroid self.Zc", "add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the circular and elliptical 
sectors", "self.Iz = ((2.0 * self.a**3 * self.b / self.p) * ((math.gamma(3.0 / self.p)", "self.p * self.q + 3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz /", "= self.b - self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG) # elastic", "Python stdlib imports import math # # package imports #import steelpy.units.control as units", "a : Mayor Axis b : Minor Axis thetaG : Angle (degrees) Returns", "which are extended to half of the circumference (thetaG = 1/2pi) may be", "* a/b - 0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279 * b/a", "((a-b)/(a+b))**2)) # Second Moment of Area about Minor Axis # -------------------------------------- K2 =", "self.q) / (self.p * self.q))))) self.Yc = 0 # Second Moment of Area", "2.0 Yc = b + t / 2.0 # Second Moment of Area", "* math.pi / 8.0) * (_a + 3 * _b)) * (1 +", "# elif _a / _b >= 1.0 and _a / _b < 4.0:", "= units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error length", "/ _a)**2 _K3 = 0.1349 + 0.1279 * (_a / _b) - 0.01284", "provided') print(' process terminated') sys.exit() # units try: _units_output = self.units_out except AttributeError:", "Minor Axis thetaG : Angle (degrees) Returns ---------- area: Section area Zc :", "* self.b / self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) /", "# def __init__(self): # # Build [WELDED / ROLLED] self.build = 'welded' #", "= -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110", ": Elastic modulus about mayor axis Zpy : Plastic modulus about mayor axis", "* ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p +", "+ 0.1279 * (_b / _a) - 0.01284 * (_b / _a)**2 _K3", "of the circumference (thetaG = 1/2pi) may be combined together to make a", "math.sqrt(_DD) + 0.0233 * _DD # elif _a / _b >= 1.0 and", "1.3333 * t * b * (b + 2*a) * (1 + K4", "/ self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez,", "length : [mandatory] force : 
temperature : gravity : [default : 9.81ms^2] ------", "if b2 == 0: b2 = b1 A2 = A1 Yc2 = Yc1", "0.9929 * _C - 0.2287 * _C**2 - 0.2193 * _C**3 self.area =", "def __init__(self): # Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress", "* (_a / _b) - 0.01284 * (_a / _b)**2 self.Iz = 0.50", "Plastic Modulus about Mayor Axis # -------------------------------------- # Let Zp be the vertical", "quarter of a circle Parameters ---------- r : radius Returns ---------- area: Section", "\"second\", \"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory]", "0.1097 * _C**3 _K3 = 1 + 0.9929 * _C - 0.2287 *", "self.Iz / _Yc1 # Plastic Modulus about Mayor Axis # -------------------------------------- # Let", "/ (_a + _b) _K1 = 0.2464 + 0.002222 * ((_a / _b)", "* (_Yc - b2 + Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme", "gyration self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return", "- _a) / (_b + _a))**2)) + (((self.tw**3 * math.pi / 16.0) *", "1 - 0.3314 * _C + 0.0136 * _C**2 + 0.1097 * _C**3", "Shape factor minor axis rz : Radius of gyration about minor Axis SC", "for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in)", "- 1.4078 * _DD**2 _C3 = -0.140 + 0.0179 * _DD + 0.4885", "cross-section with finite thickness t, e.g. 
a tube, hollow rod, pipe or cylindrical", "*= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] if self.p", "* math.pi * _a))) _Zc1 = _a + self.tw / 2.0 - self.Zc", "**kwargs): for key, value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type", ": Elastic modulus about mayor axis ry : Radius of gyration about mayor", "# Plastic section moduli mayor axis self.Zpy = (4.0 * _a**2 * self.tw", "Mayor Axis # -------------------------------------- # Let Zp be the vertical distance from the", "steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections Profiles # ----------------------------------------", "= Iy1 _d = b1 + b2 # Total cross area _A =", "file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b,", "- _Yc)**2) + (Ic2 + A2 * (_Yc - b2 + Yc2)**2)) _Iyy", "moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2) self.Zez = self.Iz /", "+ '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate", "= ((2.0 * self.a * self.b**3 / self.q) * ((math.gamma(3.0 / self.q) *", "(_b + _a))**2)) + (((self.tw**3 * math.pi / 16.0) * (3 * _b", "# to the plastic neutral axis _DD = self.tw / _tmax _DD =", "((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis # -------------------------------------- K4 = 0.1835 +", "0.1938 * _DD - 1.4078 * _DD**2 _C3 = -0.140 + 0.0179 *", "modulus about minor axis Zpz : Plastic modulus about minor axis SFz :", "self.b / 24.0) * (6.0 * _thetaG - math.sin(2 * _thetaG) * (3.0", "#------------------------------------------------- # Second Moment of Area about Mayor Axis # -------------------------------------- _K4 =", "def geometry(self, a, b, p=2.0, q=2.0): # self.a = float(a) self.b = float(b)", "/ (_a + 
_b))**2))) self.Iy = _Iy - self.area * self.Zc**2 _K2 =", "print(' ** error input units not provided') print(' process terminated') sys.exit() # units", "Second moment of area about minor axis Zez : Elastic modulus about minor", "C self.Ic = self.Iy - self.area * self.Zc**2 # The distances from the", "---------- a : Mayor Axis b : Minor Axis thetaG : Angle (degrees)", "* (b/a)**2 Iy = (math.pi * t * a**2 / 4.0 * (a", "_Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1,", "def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 = 0, A2 = 0,", "= 0.50 * ((((self.tw * _b**2 * math.pi / 4.0) * (_b +", "[mandatory]\\n temperature : \\n gravity : [default : 9.81ms^2]\\n ------ units [length, mass,", "[length, mass, time, temperature, force, pressure/stress] \"\"\" for key, value in kwargs.items(): _unit", "_K3 / (6.0 * math.pi * _a))) _Zc1 = _a + self.tw /", "# class EllipticalSector: \"\"\" Calculate the circular and elliptical sectors cross section properties", "# Second Moment of Area about x self.Iy = ((2.0 * self.a *", "0: b2 = b1 A2 = A1 Yc2 = Yc1 Ic2 = Ic1", "t): \"\"\" a b t \"\"\" # Area K1 = 0.2464 + 0.002222", "_b**2 / _a if self.tw > _tmax : sys.exit('error : t > tmax')", "sys.exit() # units try: _units_output = self.units_out except AttributeError: _units_output = self.units_in self.units_out", "#self.tb *= factors[0] # _a = self.d - 0.50 * self.tw _b =", "unit must be provided') print(' program aborted') sys.exit() # def geometry(self, **kwargs): for", "= (2 * self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0", "= 4 * r / (3 * math.pi) _Yc = _Zc # Second", "# get_dimension) # ---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse:", "= units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self): # if self.units_in[0]:", "0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD # elif _a /", "/ 
16.0 * (3*b + a) * (1 + K3 * ((b-a)/(b+a))**2)) #", "- b2 + Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme fibre distances", "_C2 / (_a / _b) + _C3 / (_a / _b)**2 + _C4", "= str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close()", "- 0.00978 * (_b / _a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw", "(2 * self.q)) * math.gamma((self.p + self.p * self.q + self.q) / (self.p", "0.002222 * (a/b + b/a) Area = math.pi * t * (a +", "90 self.p = float(p) self.q = float(q) self.type = 'Super Ellipse' def units_output(self,", "> tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a - _b) / (_a", "1.0 and _a / _b < 4.0: _C1 = 0.4829 + 0.0725 *", "_DD**2 _C4 = 0.0578 - 1.6666 * _DD + 2.6012 * _DD**2 #", "'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\"", "_C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD # elif", "8.0) * (2 * _thetaG + math.sin(2 * _thetaG))) # Second Moment of", "_A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a,", "Modulus about Mayor Axis # -------------------------------------- # Let Zp be the vertical distance", "* self.b / 24.0) * (6.0 * _thetaG - math.sin(2 * _thetaG) *", "= math.pi * r**4 / 16.0 # Second Moment of Area about y", "class EllipticalSegment: \"\"\" Calculate the circular and elliptical segments cross section properties Parameters", "3 * _b)) * (1 + _K4 * ((_a - _b) / (_a", "_thetaG - math.sin( 2 * _thetaG))) # Centroid self.Zc = ((4.0 * self.b", "= 0.05489 * r**4 _Iy2 = math.pi * r**4 / 16.0 # Second", "Area self.area = ((2.0 * self.a * self.b / self.q) * ((math.gamma(1.0 /", "(_a + _b))**2)) + (((self.tw**3 * math.pi / 32.0) * (3 * _a", "b/a - 0.01284 * (b/a)**2 Iy = (math.pi * t * a**2 /", "p and q Parameters ---------- a : Mayor Axis b : Minor Axis", "math # # package imports #import steelpy.units.control as units #from 
steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions,", "= ((2.0 * self.a**3 * self.b / self.p) * ((math.gamma(3.0 / self.p) *", "* (_a / _b) - 0.01284 * (_a / _b)**2 _K5 = 0.1349", "kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else:", "((self.a**3 * self.b / 8.0) * (2 * _thetaG - math.sin(2 * _thetaG)))", "* self.b * _thetaG # Centroid self.Zc = (2 * self.b * math.sin(_thetaG))", "self.Yc = 0 # Second Moment of Area about x self.Iy = ((2.0", "for stress, strain and strucutral matrices [W.D. Pilkey] 2.- Roark's formulas for stress", "_DD**2 _C2 = 0.1957 - 0.6608 * _DD + 1.4222 * _DD**2 _C3", "_thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area self.area =", "else: _tmax = 2 * _b**2 / _a if self.tw > _tmax :", "_y1, _Iyy / _y2) _Sx = _Ixx / _x1 # radii of gyration", "minor Axis SC : Shear centre Cw : Warping constant Notes ---------- Uses", "+ 0.0578 * _DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882", "Zp be the vertical distance from the bottom # to the plastic neutral", "/ (self.p * self.q))) / (math.gamma((2 * self.p + self.p * self.q +", "math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz,", "# # def hollow_ellipse(a, b, t): \"\"\" a b t \"\"\" # Area", "_DD + 0.4885 * _DD**2 _C4 = 0.0170 - 0.0079 * _DD -", "/ self.q)) / (math.gamma((self.p + self.p * self.q + 3 * self.q) /", "Examples ---------- \"\"\" # Area _Area = math.pi * r**2 / 4.0 #", "Iy1 + Iy2 # Extreme fibre distances _x1 = a _y1 = _d", "mayor axis Zey : Elastic modulus about mayor axis ry : Radius of", "/ _b) - 0.01284 * (_b / _a)**2 _Iy = ((((self.tw * _a**2", "A1, Yc1, Ic1, Iy1, b2 = 0, A2 = 0, Yc2 = 0,", "# return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry # #", "Iy2 # Extreme fibre distances _x1 = a _y1 = _d - _Yc", "A2 # Centroidal C-axis of full section _Yc = (A1 * (Yc1 +", "_DD + 1.3820 * _DD**2 _C2 = 
0.3731 + 0.1938 * _DD -", "a tube, hollow rod, pipe or cylindrical shell, \"\"\" # check if section", "self.Iy = _Iy - self.area * self.Zc**2 _K2 = 0.1349 + 0.1279 *", "r**4 _Iy2 = math.pi * r**4 / 16.0 # Second Moment of Area", "Superellipses as a function of the powers p and q Parameters ---------- a", "= ((math.pow(4, 1.0 / self.q) * self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0", "p & q > 0\") # Area self.area = ((2.0 * self.a *", "9.81ms^2] ------ units [length, mass, time, temperature, force, pressure/stress] \"\"\" for key, value", "x self.Iy = ((2.0 * self.a * self.b**3 / self.q) * ((math.gamma(3.0 /", "neutral centre Yc : Elastic neutral centre Iy : Second moment of area", "/ (_b + _a))**2)) + (((self.tw**3 * math.pi / 16.0) * (3 *", "Iy1, b2 = 0, A2 = 0, Yc2 = 0, Ic2 = 0,", "= -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD # else :", "self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey,", "* _DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD", "_b = self.b / 2.0 - 0.50 * self.tw # Note : there", "(math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q))))) # Centroid", "self.q) * self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2", "K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis # -------------------------------------- K4 =", "# Area K1 = 0.2464 + 0.002222 * (a/b + b/a) Area =", "def quarterCircle(r): \"\"\" Calculate a quarter of a circle Parameters ---------- r :", "0.01284 * (_a / _b)**2 _K5 = 0.1349 + 0.1279 * (_a /", "1/2pi) may be combined together to make a hollow closed cross-section with finite", "* (_b / _a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw * _b", "_DD + 2.6012 * _DD**2 # _C5 = 0.22410 - 0.3922 * math.sqrt(_DD)", "_Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E}", "/ (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 * self.q)) *", "* self.tw _b = 
self.b / 2.0 - 0.50 * self.tw # Note", "about y self.Iz = ((self.a**3 * self.b / 8.0) * (2 * _thetaG", "b * (b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3", "+ self.p) / self.p)) / (math.gamma((self.p + self.p * self.q + self.q) /", "+ 2.8763 * math.sqrt(_DD) - 1.8874 * _DD # else : sys.exit('error a/b", "fibres _y1 = self.a * math.sin(_thetaG) _z1 = self.b - self.Zc _z2 =", "this case. # Cusps will form in the perimeter at # the ends", "self.p)) / (math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q)))))", "_z1, self.Ic / _z2) self.Zez = self.Iz / _y1 # plastic section moduli", "* (1 + _K3 * ((_b - _a) / (_b + _a))**2))) #", "mayor axis if this # maximum is exceeded. if _a/_b < 1.0 :", "= 1.3333 * t * b * (b + 2*a) * (1 +", "* _thetaG) self.Yc = 0 # Second Moment of Area about x self.Iy", "- math.sin( 2 * _thetaG))) # Centroid self.Zc = ((4.0 * self.b *", "float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n", "\"\"] for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value,", "Centroid _Zc = 4 * r / (3 * math.pi) _Yc = _Zc", "self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def", "# Plastic section moduli minor axis _K4 = 0.1835 + 0.895 * (_b", "math.pi / 16.0) * (3 * _b + _a)) * (1 + _K3", "'.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the", "_Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz # # def", "= str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector:", "0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279 * a/b - 0.01284 *", "< a/b < 0.50 Parameters ---------- d : Section Heigh b : Base", "# elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2) self.Zez", "_b))**2)) # Centroid self.Zc = ((2.0 * _a * 
_K2 / math.pi) +", "_a / _b > 0.25 and _a / _b < 1.0: _C1 =", "Area K1 = 0.2464 + 0.002222 * (a/b + b/a) Area = math.pi", "* (a/b)**2 K3 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2", "factors[0] self.b *= factors[0] #self.tb *= factors[0] # _a = self.d - 0.50", "- 0.01284 * (_a / _b)**2 self.Iz = 0.50 * ((((self.tw * _b**2", "unit must be provided') print(' program aborted') sys.exit() # def geometry(self, a, b,", "# Copyright (c) 2019-2021 steelpy # # Python stdlib imports import math #", "'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force :", "#self.ta *= factors[0] self.b *= factors[0] #self.tb *= factors[0] # _a = self.d", "math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD) -", "0.20) _DD = min(_DD , 1.0) if _a / _b > 0.25 and", "_DD = max(_DD , 0.20) _DD = min(_DD , 1.0) if _a /", "q : Returns ---------- area: Section area Zc : Elastic neutral centre Yc", "* (2 * _thetaG - math.sin(2 * _thetaG))) # Second Moment of Area", "/ _a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw * _b * (_b", "self.Iz / _y1 # plastic section moduli _Zpy = 0 _Zpz = 0", "open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter of a", "(_b / _a) - 0.01284 * (_b / _a)**2 _K3 = 0.1349 +", "(b2 - Yc2)) / _A # Second moment of full area _Ixx =", "math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc,", "\"\"\" # Area K1 = 0.2464 + 0.002222 * (a/b + b/a) Area", "_Sy, _ry # # def hollow_ellipse(a, b, t): \"\"\" a b t \"\"\"", "(1 + _K5 * ((_a - _b) / (_a + _b))**2))) self.Iy =", "Radius of gyration about minor Axis Notes ---------- Uses formulas from: 1.- Structural", "* (1 + _K2 * ((_b - _a) / (_b + _a))**2)) +", "/ self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez,", "_a) - 0.01284 * (_b / _a)**2 _K3 = 0.1349 + 0.1279 *", "str(file_name) + '.txt' add_out = 
open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\"", "self.q) / (self.p * self.q))))) # Second Moment of Area about y self.Iz", "of area about minor axis Zez : Elastic modulus about minor axis Zpz", "0.01284 * (a/b)**2 Iz = (math.pi * t * b**2 / 4.0 *", "from: 1.- Geometric properties for the design of unusual member cross-sections in bending", "self.Iz = ((self.a**3 * self.b / 24.0) * (6.0 * _thetaG - math.sin(2", "Cusps will form in the perimeter at # the ends of the mayor", "centroid to the extreme fibres _y1 = self.a _z1 = self.b - self.Zc", "the extreme fibres _y1 = self.a _z1 = self.b - self.Zc _z2 =", "/ 3.0 # Elastic Modulus about Minor Axis # -------------------------------------- K4 = 0.1835", "self.p * self.q + self.q) / (self.p * self.q))))) self.Yc = 0 #", "2.- Roark's formulas for stress and strain [7th Edition] 3.- Wikipedia Examples ----------", "* (_b / _a) - 0.01284 * (_b / _a)**2 _K3 = 0.1349", "the circular and elliptical sectors cross section properties Parameters ---------- a : Mayor", "3*a) * (1 + K2 * ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0", "= 1.3333 * t * a * (a + 2*b) * (1 +", "* (b2 - Yc2)) / _A # Second moment of full area _Ixx", "_K5 * ((_a - _b) / (_a + _b))**2))) self.Iy = _Iy -", "* math.sin(_thetaG) _z1 = self.b - self.Zc _z2 = self.Zc - self.b *", "= (4.0 * _a**2 * self.tw * (_C5 + _C6 / (_a /", "self.p)) / (math.gamma((3 * self.p + self.p * self.q + self.q) / (self.p", "r**2 / 4.0 # # Centroid _Zc = 4 * r / (3", "sys.exit(\"error p & q > 0\") # Area self.area = ((2.0 * self.a", "self.a * self.b * (2 * _thetaG - math.sin( 2 * _thetaG))) #", "_b) - 0.01284 * (_a / _b)**2 _K5 = 0.1349 + 0.1279 *", "tube, hollow rod, pipe or cylindrical shell, \"\"\" # check if section is", "+ t / 2.0 # Second Moment of Area about Mayor Axis #", "print(' program aborted') sys.exit() # def geometry(self, a, b, thetaG): # self.a =", "(_b / _a)**2 _Iy = ((((self.tw * _a**2 * math.pi / 
8.0) *", "+ K3 * ((a-b)/(a+b))**2)) # Second Moment of Area about Minor Axis #", "_b)**3)) _Yp = 0 # Plastic section moduli mayor axis self.Zpy = (4.0", "check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name)", ", 1.0) if _a / _b > 0.25 and _a / _b <", "= units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0] #self.tw *= factors[0] #self.a", "/ _b)**3)) # Plastic section moduli minor axis _K4 = 0.1835 + 0.895", "_Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1,", "*= factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb *= factors[0] # _a", "/ self.area) # # #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz,", "- _b) / (_a + _b))**2))) self.Iy = _Iy - self.area * self.Zc**2", ": Mayor Axis b : Minor Axis thetaG : Angle (degrees) Returns ----------", "self.p + self.p * self.q + self.q) / (self.p * self.q))))) self.Yc =", "= self.Iz / _Yc1 # Plastic Modulus about Mayor Axis # -------------------------------------- #", "# Section Properties #------------------------------------------------- # Second Moment of Area about Mayor Axis #", "self.Zc self.Yc = 0 _Yc1 = _b + self.tw / 2.0 #------------------------------------------------- #", "constant wall thickness Tw. 
The midthickness perimeter is an ellipse 0.2 < a/b", "Elastic modulus about mayor axis Zpy : Plastic modulus about mayor axis SFy", "Second Moment of Area about y self.Iz = ((self.a**3 * self.b / 24.0)", "units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error length unit must be provided')", "_units_output self.d *= factors[0] #self.tw *= factors[0] #self.a *= factors[0] #self.ta *= factors[0]", "factors[0] #self.tw *= factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b *= factors[0]", "self.Zc # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2)", "_Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def", "/ _b)**2 + _C4 / (_a / _b)**3)) _Yp = 0 # Plastic", "Axis thetaG : Angle (degrees) Returns ---------- area: Section area Zc : Elastic", "/ self.q) * self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) /", "case. # Cusps will form in the perimeter at # the ends of", "0.1279 * a/b - 0.01284 * (a/b)**2 Iz = (math.pi * t *", "add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the circular", "Sadowski] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build =", "_b) + _C3 / (_a / _b)**2 + _C4 / (_a / _b)**3))", "# Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in", "+ 0.895 * a/b - 0.00978 * (a/b)**2 Zey = 1.3333 * t", "_C2 = 0.3731 + 0.1938 * _DD - 1.4078 * _DD**2 _C3 =", "unusual member cross-sections in bending [A.J. 
Sadowski] Examples ---------- \"\"\" def __init__(self): #", "self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *= factors[0] self.b *=", "0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 K3 = 0.1349 +", "/ (math.gamma((2 * self.p + self.p * self.q + self.q) / (self.p *", "= math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area self.area = (0.50", "properties Superellipses as a function of the powers p and q Parameters ----------", "self.b / self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p))", "mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\",", "program aborted') sys.exit() # def geometry(self, a, b, p=2.0, q=2.0): # self.a =", "_a / _b < 4.0: _C1 = 0.4829 + 0.0725 * _DD -", "* (_a + _b) * (1.0 + _K1 * ((_a - _b) /", "horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 # The distances", "(self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of Area about the", "_Ixx / _x1 # radii of gyration _ry = math.sqrt(_Iyy / _A) _rx", "_K5 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_b", "_C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8 =", "(degrees) Returns ---------- area: Section area Zc : Elastic neutral centre Yc :", "math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0 # Second Moment of Area", "+ 0.0233 * _DD # elif _a / _b >= 1.0 and _a", "matrices [W.D. 
Pilkey] 2.- Roark's formulas for stress and strain [7th Edition] 3.-", "_Sx, _rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a, b, t): \"\"\" a", "0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 K3 = 0.1349 +", "Sections Profiles Extension Open cross-sections which are extended to half of the circumference", "math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2)", "= float(p) self.q = float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\"", "Mayor Axis # -------------------------------------- _K4 = 0.1349 + 0.1279 * (_a / _b)", "Mayor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * a/b - 0.01284", "+ 0.895 * b/a - 0.00978 * (b/a)**2 Zez = 1.3333 * t", "= ((self.a**3 * self.b / 8.0) * (2 * _thetaG - math.sin(2 *", "* self.b / self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) /", "(_b / _a)**2 _K3 = 0.1349 + 0.1279 * (_a / _b) -", "_Iyy, _Sy, _ry # # def hollow_ellipse(a, b, t): \"\"\" a b t", "x _Iy = 0.07135 * r**4 _Iy1 = 0.05489 * r**4 _Iy2 =", "#------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment of Area about Mayor Axis", "Notes ---------- Uses formulas from: 1.- Geometric properties for the design of unusual", "in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self):", "unusual member cross-sections in bending [<NAME>] Examples ---------- \"\"\" def __init__(self): # Build", "Area about x self.Iy = ((self.a * self.b**3 / 8.0) * (2 *", "# Area self.area = self.a * self.b * _thetaG # Centroid self.Zc =", "math.sin(2 * _thetaG))) # Second Moment of Area about y self.Iz = ((self.a**3", "(3.0 + 2.0 * math.sin(_thetaG)**2))) # Second Moment of Area about the horizontal", "factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb *= factors[0]", "add_out.write(\"\".join(check_out)) 
add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter of a circle", "about mayor axis Zpy : Plastic modulus about mayor axis SFy : Shape", "max(_DD , 0.20) _DD = min(_DD , 1.0) if _a / _b >", "# Second Moment of Area about Mayor Axis # -------------------------------------- _K4 = 0.1349", "area about mayor axis Zey : Elastic modulus about mayor axis ry :", "# Second moment of full area _Ixx = ((Ic1 + A1 * (Yc1", "* b/a - 0.01284 * (b/a)**2 Iy = (math.pi * t * a**2", "= open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse cross", "self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 / self.q) * self.b / (2", "* (1 + _K4 * ((_b - _a) / (_a + _b))**2)) +", "# # Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM", "factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi)", "(0.50 * self.a * self.b * (2 * _thetaG - math.sin( 2 *", "self.area) self.rz = math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey,", "+ (((self.tw**3 * math.pi / 32.0) * (3 * _a + _b)) *", "> 4 or a/b < 0.25') # Plastic neutral axis _Zp = (_a", "self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic,", "Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature", "minor Axis Notes ---------- Uses formulas from: 1.- Geometric properties for the design", "Area about y _Iz = 0.03843 * r**4 _Iz1 = _Iy1 _Iz2 =", "Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * b/a - 0.01284 *", "_K4 * ((_a - _b) / (_a + _b))**2)) + (((self.tw**3 * math.pi", "* (_b + 2 * _a)) * (1 + _K4 * ((_b -", "+ 1.4222 * _DD**2 _C3 = 0.0203 + 1.8999 * _DD - 3.4356", "file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def", 
"((math.gamma((2.0 + self.q) / (2 * self.q)) * math.gamma((self.p + self.p * self.q", "2.0 - 0.50 * self.tw # Note : there is a limit on", "a/b - 0.00978 * (a/b)**2 Zey = 1.3333 * t * a *", "= _Yc # Elastic section moduli _Sy = min(_Iyy / _y1, _Iyy /", "* (a/b)**2 Iz = (math.pi * t * b**2 / 4.0 * (b", "in the perimeter at # the ends of the mayor axis if this", "3.- Wikipedia Examples ---------- \"\"\" # def __init__(self): # # Build [WELDED /", "* _a + _b)) * (1 + _K5 * ((_a - _b) /", "= split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out =", "\"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature : \\n gravity :", "# Area _Area = math.pi * r**2 / 4.0 # # Centroid _Zc", "* math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD)", "Notes ---------- Uses formulas from: 1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\"", "\"\"\" for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value,", ": Minor Axis p : q : Returns ---------- area: Section area Zc", "a quarter of a circle Parameters ---------- r : radius Returns ---------- area:", "_units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] for key, value in kwargs.items():", "math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc =", "Minor Axis # -------------------------------------- K4 = 0.1835 + 0.895 * b/a - 0.00978", "= math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry,", "- 0.2193 * _C**3 self.area = ((self.tw * math.pi / 2.0) * (_a", "a/b - 0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279 * b/a -", "Second Moment of Area about y self.Iz = ((2.0 * self.a**3 * self.b", "(0.50 * (((1.3333 * self.tw * _b * (_b + 2 * _a))", ": [mandatory]\\n force : [mandatory]\\n temperature : \\n gravity : [default : 9.81ms^2]\\n", "about x self.Iy = ((self.a * self.b**3 / 8.0) * (2 * _thetaG", 
"factor minor axis rz : Radius of gyration about minor Axis SC :", "circular and elliptical segments cross section properties Parameters ---------- a : Mayor Axis", "((_a - _b) / (_a + _b))**2)) + (((self.tw**3 * math.pi / 32.0)", "* _DD**2 _C2 = 0.3731 + 0.1938 * _DD - 1.4078 * _DD**2", "aborted') sys.exit() # def geometry(self, **kwargs): for key, value in kwargs.items(): _dim =", "*= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG =", "#return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz #", "temperature : \\n gravity : [default : 9.81ms^2]\\n ------ units [length, mass, time,", "+ '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate", "= 1 + 0.9929 * _C - 0.2287 * _C**2 - 0.2193 *", "str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\"", "* _C**2 - 0.2193 * _C**3 self.area = ((self.tw * math.pi / 2.0)", "- math.sin(2 * _thetaG)))) self.Yc = 0 # Second Moment of Area about", "Extreme fibre distances _x1 = a _y1 = _d - _Yc _y2 =", "print('ok') # class EllipticalSegment: \"\"\" Calculate the circular and elliptical segments cross section", "y self.Iz = ((self.a**3 * self.b / 8.0) * (2 * _thetaG -", "_Zpy, _ry, _Iz, _Zez, _Zpz, _rz # # def print_file(self, file_name): check_out =", "# Second Moment of Area about Minor Axis # -------------------------------------- K2 = 0.1349", "b : Base tw : Wall thickness Returns ---------- area: Section area Zc", "/ (self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of Area about", "get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print(' ** error input units", "\"\"\" Calculate the section properties of a Hollow Semiellipse with constant wall thickness", "math.sin( 2 * _thetaG))) # Centroid self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3)", 
"# else : sys.exit('error a/b > 4 or a/b < 0.25') # Plastic", "(math.gamma((3 * self.p + self.p * self.q + self.q) / (self.p * self.q)))))", "Shear centre Cw : Warping constant Notes ---------- Uses formulas from: 1.- Formulas", "# Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey = self.Iy / _Zc1", "(1 + _K3 * ((_b - _a) / (_b + _a))**2))) # Elastic", "0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 Iy = (math.pi *", "self.Zc**2 _K2 = 0.1349 + 0.1279 * (_b / _a) - 0.01284 *", "0.1957 - 0.6608 * _DD + 1.4222 * _DD**2 _C3 = 0.0203 +", "* math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc", "/ (2 * self.q)) * math.gamma((self.p + self.p * self.q + self.q) /", "+ (_b / _a)) _K2 = 1 - 0.3314 * _C + 0.0136", "= _units_output self.a *= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *=", "Moment of Area about Mayor Axis # -------------------------------------- K2 = 0.1349 + 0.1279", "+ self.p) / self.p)) / (math.gamma((3 * self.p + self.p * self.q +", "Uses formulas from: 1.- Geometric properties for the design of unusual member cross-sections", "def __init__(self): # # Build [WELDED / ROLLED] self.build = 'welded' # Shear", "/ (math.gamma((self.p + self.p * self.q + 3 * self.q) / (self.p *", "Elastic Modulus about Minor Axis # -------------------------------------- K4 = 0.1835 + 0.895 *", "/ self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 * self.p +", "\"\"\" Elliptical Sections Profiles Extension Open cross-sections which are extended to half of", "Engineering Formulas <NAME> Examples ---------- \"\"\" # Area _Area = math.pi * r**2", "units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error length unit", "0.0578 * _DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 *", "= self.d - 0.50 * self.tw _b = self.b / 2.0 - 0.50", "formulas for stress and strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\" 
#", "* (a/b + b/a) Area = math.pi * t * (a + b)", "(A1 * (Yc1 + b2) + A2 * (b2 - Yc2)) / _A", "self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name)", "of the mayor axis if this # maximum is exceeded. if _a/_b <", "+'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') #", "b2 - _Yc)**2) + (Ic2 + A2 * (_Yc - b2 + Yc2)**2))", ": Plastic modulus about minor axis SFz : Shape factor minor axis rz", "_Zp # def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d,", "Minor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * b/a - 0.01284", "(1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc = a + t /", "_a = self.d - 0.50 * self.tw _b = self.b / 2.0 -", "# def get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print(' ** error", "centroid to the extreme fibres _y1 = self.a * math.sin(_thetaG) _z1 = self.b", "# class EllipticalSegment: \"\"\" Calculate the circular and elliptical segments cross section properties", "self.q = float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length", "self.Zc = ((math.pow(4, 1.0 / self.q) * self.b / (2 * math.sqrt(math.pi))) *", "_A = A1 + A2 # Centroidal C-axis of full section _Yc =", "_z2 = self.Zc # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic", "def get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print(' ** error input", "error input units not provided') print(' process terminated') sys.exit() # units try: _units_output", "* _a**2 * math.pi / 8.0) * (_a + 3 * _b)) *", "# Second Moment of Area about x _Iy = 0.07135 * r**4 _Iy1", "0.50 * math.pi) # Area self.area = (0.50 * self.a * self.b *", "plastic section 
moduli _Zpy = 0 _Zpz = 0 # radii of gyration", "+ self.q) / (self.p * self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 /", "full area _Ixx = ((Ic1 + A1 * (Yc1 + b2 - _Yc)**2)", "math.sin(4 * _thetaG))) # Second Moment of Area about y self.Iz = ((self.a**3", "* (1 + K3 * ((a-b)/(a+b))**2)) # Second Moment of Area about Minor", "class SuperEllipse: \"\"\" Calculate the superellipse cross section properties Superellipses as a function", "self.b *= factors[0] #self.tb *= factors[0] # _a = self.d - 0.50 *", "(a + b) * (1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc =", "_Yp = 0 # Plastic section moduli mayor axis self.Zpy = (4.0 *", "[\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\" Input: ====== length", "0 # Second Moment of Area about x self.Iy = ((2.0 * self.a", "/ self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) /", "centroidal C self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) #", "---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the", "# Let Zp be the vertical distance from the bottom # to the", "self.b / 2.0 - 0.50 * self.tw # Note : there is a", "2.6012 * _DD**2 # _C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960", "0 _Zpz = 0 # radii of gyration self.ry = math.sqrt(self.Ic / self.area)", "imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ----------------------------------------", "b2 = 0, A2 = 0, Yc2 = 0, Ic2 = 0, Iy2", "a) * (1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis", "9.81ms^2]\\n ------ units [length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\",", "2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment of Area about Mayor", "Zez : Elastic 
modulus about minor axis rz : Radius of gyration about", "Structural Engineering Formulas <NAME> Examples ---------- \"\"\" # Area _Area = math.pi *", "about the horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 #", "/ (6.0 * math.pi * _a))) _Zc1 = _a + self.tw / 2.0", "* self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of", "self.area) self.rz = math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc, _Yc, self.Ic,", "be the vertical distance from the bottom # to the plastic neutral axis", "+ K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus about Minor", "math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz,", "minor axis Zpz : Plastic modulus about minor axis SFz : Shape factor", "Axis b : Minor Axis p : q : Returns ---------- area: Section", "self.b - self.Zc _z2 = self.Zc # elastic section moduli self.Zey = min(self.Ic", "# def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\"", "- 0.00978 * (a/b)**2 Zey = 1.3333 * t * a * (a", "must be provided') print(' program aborted') sys.exit() # def geometry(self, a, b, thetaG):", "self.units_out except AttributeError: _units_output = self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output)", "a * (a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3", "Moment of Area about y self.Iz = ((self.a**3 * self.b / 24.0) *", "Moment of Area about Mayor Axis # -------------------------------------- _K4 = 0.1349 + 0.1279", "float(a) self.b = float(b) self.theta = float(thetaG) self.p = 0 self.q = 0", "pipe or cylindrical shell, \"\"\" # check if section is symmetrical if b2", "SFz : Shape factor minor axis rz : Radius of gyration about minor", "# Area self.area = (0.50 * self.a * self.b * (2 * _thetaG", "+ 3 * _b)) * (1 + _K4 * ((_a - _b) /", "AttributeError: _units_output = self.units_in self.units_out = self.units_in 
factors = units.get_length_mass(_units_input, _units_output) self.units_in =", "= 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637", "and strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\" # def __init__(self): #", "{:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout =", "t > tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a - _b) /", "# Second Moment of Area about x self.Iy = ((self.a * self.b**3 /", "about mayor axis Zey : Elastic modulus about mayor axis Zpy : Plastic", "axis rz : Radius of gyration about minor Axis SC : Shear centre", "2 * _b**2 / _a if self.tw > _tmax : sys.exit('error : t", "or self.q <= 0: sys.exit(\"error p & q > 0\") # Area self.area", "_rx = math.sqrt(_Ixx / _A) # return _A, _Yc, _x1, _Ixx, _Sx, _rx,", "moment of area about mayor axis Zey : Elastic modulus about mayor axis", "circular and elliptical sectors cross section properties Parameters ---------- a : Mayor Axis", "of gyration about minor Axis Notes ---------- Uses formulas from: 1.- Structural Engineering", "* _thetaG))) # Second Moment of Area about the horizontal centroidal C self.Ic", "math.pi * t**3 / 16.0 * (3*a + b) * (1 + K3", "_d = b1 + b2 # Total cross area _A = A1 +", "_Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2", "= (A1 * (Yc1 + b2) + A2 * (b2 - Yc2)) /", "Open cross-sections which are extended to half of the circumference (thetaG = 1/2pi)", "self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey,", "* _thetaG)))) self.Yc = 0 # Second Moment of Area about x self.Iy", "self.theta = 90 self.p = float(p) self.q = float(q) self.type = 'Super Ellipse'", "_x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a, b, t):", "+ self.tw / 2.0 - self.Zc self.Yc = 0 _Yc1 = _b +", "+ 0.0136 * _C**2 + 0.1097 * _C**3 _K3 = 1 + 0.9929", "_Zpz, _rz, 
_Zp # def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\"", "an ellipse 0.2 < a/b < 0.50 Parameters ---------- d : Section Heigh", "* _K3 / (6.0 * math.pi * _a))) _Zc1 = _a + self.tw", "factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta)", "print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter of a circle Parameters ----------", "_DD**2 _C2 = 0.3731 + 0.1938 * _DD - 1.4078 * _DD**2 _C3", "* _a)) * (1 + _K2 * ((_b - _a) / (_b +", "Calculate the circular and elliptical segments cross section properties Parameters ---------- a :", "tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a - _b) / (_a +", "Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in = [\"\",", "((2.0 * _a * _K2 / math.pi) + (self.tw**2 * _K3 / (6.0", "0, A2 = 0, Yc2 = 0, Ic2 = 0, Iy2 = 0):", "sys.exit() # def geometry(self, a, b, p=2.0, q=2.0): # self.a = float(a) self.b", "+ _K1 * ((_a - _b) / (_a + _b))**2)) # Centroid self.Zc", "print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout", "= float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Segment' def", "* t * a * (a + 2*b) * (1 + K4 *", "0 # Plastic section moduli mayor axis self.Zpy = (4.0 * _a**2 *", "self.a *= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG", "# # Centroid _Zc = 4 * r / (3 * math.pi) _Yc", "= math.sqrt(_Ixx / _A) # return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy,", "Centroid Zc = a + t / 2.0 Yc = b + t", "3.4356 * _DD**2 _C4 = 0.0578 - 1.6666 * _DD + 2.6012 *", "= find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs):", "* (b/a)**2 K3 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2", 
"_rz, _Zp # def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type,", "+ 0.1938 * _DD - 1.4078 * _DD**2 _C3 = -0.140 + 0.0179", "math.pi) # Area self.area = (0.50 * self.a * self.b * (2 *", "/ self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, self.Ic,", "to the extreme fibres _y1 = self.a _z1 = self.b - self.Zc _z2", "* (1 + _K5 * ((_a - _b) / (_a + _b))**2))) self.Iy", "_a)) _K2 = 1 - 0.3314 * _C + 0.0136 * _C**2 +", "+ t**3 / 3.0 # Elastic Modulus about Minor Axis # -------------------------------------- K4", "self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *=", "Axis # -------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez = self.Iz /", "_z1 = self.b - self.Zc _z2 = self.Zc # elastic section moduli self.Zey", "+ b) * (1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc = a", "(self.p * self.q))) / (math.gamma((2 * self.p + self.p * self.q + self.q)", "float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Sector' def units_output(self,", "0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740 - 0.8531 *", "_tmax = 2 * _a**2 / _b else: _tmax = 2 * _b**2", "_C7 / (_a / _b)**2 + _C8 / (_a / _b)**3)) # Plastic", "(_a / _b)**2 + _C8 / (_a / _b)**3)) # Plastic section moduli", "self.rz = math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy,", "\"\"\" Calculate the circular and elliptical sectors cross section properties Parameters ---------- a", "* math.sqrt(_DD) - 1.8874 * _DD # else : sys.exit('error a/b > 4", "* _DD**2 _C3 = -0.140 + 0.0179 * _DD + 0.4885 * _DD**2", "mass, time, temperature, force, pressure/stress] \"\"\" for key, value in kwargs.items(): _unit =", "t**3 / 16.0 * (3*a + b) * (1 + K3 * ((a-b)/(a+b))**2))", "centre Yc : Elastic neutral centre Iy : Second moment of area about", "0.0565 * _DD**2 # _C5 = -0.0292 + 0.3749 * 
math.sqrt(_DD) + 0.0578", "Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections which are extended", "0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_b / _a)**2", "units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self): # if self.units_in[0]: _units_input", "_thetaG + math.sin(2 * _thetaG))) # Second Moment of Area about y self.Iz", "# plastic section moduli _Zpy = 0 _Zpz = 0 # radii of", "* self.b**3 / self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) /", "_DD # else : sys.exit('error a/b > 4 or a/b < 0.25') #", "self.Ic / _z2) self.Zez = self.Iz / _y1 # plastic section moduli _Zpy", "+ A2 * (b2 - Yc2)) / _A # Second moment of full", "= 0.4829 + 0.0725 * _DD - 0.1815 * _DD**2 _C2 = 0.1957", "Moment of Area about x self.Iy = ((2.0 * self.a * self.b**3 /", "b/a - 0.00978 * (b/a)**2 Zez = 1.3333 * t * b *", "about Mayor Axis # -------------------------------------- # Let Zp be the vertical distance from", ": Warping constant Notes ---------- Uses formulas from: 1.- Formulas for stress, strain", "(_b + _a))**2))) # Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey =", "_b)) * (1 + _K5 * ((_a - _b) / (_a + _b))**2)))", "circle Parameters ---------- r : radius Returns ---------- area: Section area Zc :", "self.Zey = min(self.Ic / _z1, self.Ic / _z2) self.Zez = self.Iz / _y1", "_Iy1 = 0.05489 * r**4 _Iy2 = math.pi * r**4 / 16.0 #", "self.a *= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] if", "((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 * self.p", "_thetaG = min(_thetaG, 0.50 * math.pi) # Area self.area = self.a * self.b", "/ 8.0) * (_a + 3 * _b)) * (1 + _K4 *", "* _b)) * (1 + _K4 * ((_a - _b) / (_a +", "* self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of Area about the horizontal", "\"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory] 
force", "about Minor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * b/a -", "Ic2 = 0, Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections", "of gyration about minor Axis SC : Shear centre Cw : Warping constant", "/ (3 * math.pi) _Yc = _Zc # Second Moment of Area about", "self.b**3 / 8.0) * (2 * _thetaG + math.sin(2 * _thetaG))) # Second", "/ math.pi) + (self.tw**2 * _K3 / (6.0 * math.pi * _a))) _Zc1", "* ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis # -------------------------------------- K4 = 0.1835", "(1 + K2 * ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 * (3*a", "= min(abs(_thetaG), 0.50 * math.pi) # Area self.area = (0.50 * self.a *", "(self.p * self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 / self.q) * self.b", "_a + self.tw / 2.0 - self.Zc self.Yc = 0 _Yc1 = _b", "4.0) * (_b + 3 * _a)) * (1 + _K2 * ((_b", "0 self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n", "0.50 Parameters ---------- d : Section Heigh b : Base tw : Wall", "SFy : Shape factor mayor axis ry : Radius of gyration about mayor", "_a) / (_b + _a))**2)) + (((self.tw**3 * math.pi / 16.0) * (3", "* self.b * math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG - math.sin(2 *", "Second Moment of Area about Minor Axis # -------------------------------------- K2 = 0.1349 +", "section properties Parameters ---------- a : Mayor Axis b : Minor Axis thetaG", "* math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD)", "/ (_a / _b)**3)) _Yp = 0 # Plastic section moduli mayor axis", "* _DD + 0.4885 * _DD**2 _C4 = 0.0170 - 0.0079 * _DD", "self.p) * math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p + self.p * self.q", "package imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) #", "process terminated') sys.exit() # units try: _units_output = self.units_out except AttributeError: _units_output 
=", "* (_b / _a)**2 _Iy = ((((self.tw * _a**2 * math.pi / 8.0)", "Area _C = (_a - _b) / (_a + _b) _K1 = 0.2464", "{:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name)", "* t * b * (b + 2*a) * (1 + K4 *", "_C4 = 0.0170 - 0.0079 * _DD - 0.0565 * _DD**2 # _C5", "((2.0 * self.a**3 * self.b / self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0", "0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218 + 0.3563 *", "# -------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez = self.Iz / _Yc1", "(2 * self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0 #", "* _a)) * (1 + _K4 * ((_b - _a) / (_a +", "* ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 *", "0 self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n", "math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc,", "Axis p : q : Returns ---------- area: Section area Zc : Elastic", "* (_a / _b) - 0.01284 * (_b / _a)**2 _Iy = ((((self.tw", "self.q *= factors[0] if self.p <= 0 or self.q <= 0: sys.exit(\"error p", "1.0: _C1 = 0.5067 - 0.5588 * _DD + 1.3820 * _DD**2 _C2", "Centroid self.Zc = ((math.pow(4, 1.0 / self.q) * self.b / (2 * math.sqrt(math.pi)))", "is a limit on the maximum # wall thickness allowed in this case.", "centre Cw : Warping constant Notes ---------- Uses formulas from: 1.- Formulas for", "+ 2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return", "design of unusual member cross-sections in bending [<NAME>] Examples ---------- \"\"\" def __init__(self):", "Section area Zc : Elastic neutral centre Yc : Elastic neutral centre Iy", "gyration _ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A) # return", ": radius Returns ---------- area: Section area Zc : Elastic neutral centre Yc", "Second Moment of Area about y _Iz = 0.03843 * r**4 
_Iz1 =", "factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] if self.p <=", "* self.Zc**2 _K2 = 0.1349 + 0.1279 * (_b / _a) - 0.01284", "/ self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez,", "of Area about y self.Iz = ((self.a**3 * self.b / 24.0) * (6.0", "+ (((self.tw**3 * math.pi / 16.0) * (3 * _b + _a)) *", "* math.pi / 4.0) * (_b + 3 * _a)) * (1 +", "= self.a * math.sin(_thetaG) _z1 = self.b - self.Zc _z2 = self.Zc -", "_b < 1.0: _C1 = 0.5067 - 0.5588 * _DD + 1.3820 *", "temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] for", "= min(self.Ic / _z1, self.Ic / _z2) self.Zez = self.Iz / _y1 #", "* ((a-b)/(a+b))**2)) # Second Moment of Area about Minor Axis # -------------------------------------- K2", "= 0.1835 + 0.895 * (_b / _a) - 0.00978 * (_b /", "/ _b) - 0.01284 * (_a / _b)**2 self.Iz = 0.50 * ((((self.tw", "_units_output = self.units_out except AttributeError: _units_output = self.units_in self.units_out = self.units_in factors =", "Geometric properties for the design of unusual member cross-sections in bending [<NAME>] Examples", ": t > tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a - _b)", "_a + _b)) * (1 + _K5 * ((_a - _b) / (_a", "- self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG) # elastic section moduli", "+ 0.1279 * b/a - 0.01284 * (b/a)**2 Iy = (math.pi * t", "* _DD - 0.1815 * _DD**2 _C2 = 0.1957 - 0.6608 * _DD", "bottom # to the plastic neutral axis _DD = self.tw / _tmax _DD", "self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG) # elastic section moduli self.Zey", "* _a**2 / _b else: _tmax = 2 * _b**2 / _a if", "* _thetaG))) # Centroid self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3) / (3.0", "_thetaG))) # Second Moment of Area about the horizontal centroidal C self.Ic =", "0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2 self.Iz =", "* math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p + self.p * self.q 
+", "mayor axis SFy : Shape factor mayor axis ry : Radius of gyration", "Formulas <NAME> Examples ---------- \"\"\" # Area _Area = math.pi * r**2 /", ": Radius of gyration about minor Axis SC : Shear centre Cw :", "about minor axis Zez : Elastic modulus about minor axis rz : Radius", "+ _a))**2)) + (((self.tw**3 * math.pi / 16.0) * (3 * _b +", "= str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse:", "+ 2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110 - 5.3864", "16.0) * (3 * _b + _a)) * (1 + _K3 * ((_b", "((((self.tw * _a**2 * math.pi / 8.0) * (_a + 3 * _b))", "b t \"\"\" # Area K1 = 0.2464 + 0.002222 * (a/b +", "+ 2 * _a)) * (1 + _K4 * ((_b - _a) /", "_b**2 * math.pi / 4.0) * (_b + 3 * _a)) * (1", "16.0 * (3*a + b) * (1 + K3 * ((a-b)/(a+b))**2)) # Second", "_b)**2 _K5 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 *", "_Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a, b,", "[mandatory] force : temperature : gravity : [default : 9.81ms^2] ------ units [length,", "# def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a,", "0.25 and _a / _b < 1.0: _C1 = 0.5067 - 0.5588 *", "/ self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, _Iy,", "print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout =", "of Area about y self.Iz = ((2.0 * self.a**3 * self.b / self.p)", "{:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout", "_Iz1 = _Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2,", "* math.pi / 32.0) * (3 * _a + _b)) * (1 +", "try: _units_output = self.units_out except AttributeError: _units_output = 
self.units_in self.units_out = self.units_in factors", "t * a * (a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2)", "Axis Notes ---------- Uses formulas from: 1.- Geometric properties for the design of", "[length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\",", "in bending [A.J. Sadowski] Examples ---------- \"\"\" def __init__(self): # Build [WELDED /", "------ units [length, mass, time, temperature, force, pressure/stress] \"\"\" for key, value in", "value, _units_in) # def get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print('", "b : Minor Axis p : q : Returns ---------- area: Section area", "self.tw * (_C5 + _C6 / (_a / _b) + _C7 / (_a", "_b) - 0.01284 * (_a / _b)**2 self.Iz = 0.50 * ((((self.tw *", "Axis # -------------------------------------- K4 = 0.1835 + 0.895 * b/a - 0.00978 *", "(a/b)**2 Zey = 1.3333 * t * a * (a + 2*b) *", "-0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740 -", "= self.a * self.b * _thetaG # Centroid self.Zc = (2 * self.b", "units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *= factors[0] self.b *= factors[0] self.p *=", "K2 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 K3 =", "_a)**2 _Iy = ((((self.tw * _a**2 * math.pi / 8.0) * (_a +", "self.area) # #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz,", "(2 * _thetaG + math.sin(2 * _thetaG))) # Second Moment of Area about", "Zc : Elastic neutral centre Yc : Elastic neutral centre Iy : Second", "self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG", "Iz : Second moment of area about minor axis Zez : Elastic modulus", "math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p + self.p * self.q + self.q)", "Yc1, Ic1, Iy1, b2 = 0, A2 = 0, Yc2 = 0, Ic2", "(c) 2019-2021 steelpy # # Python stdlib imports import math # # package", "the extreme fibres _y1 = self.a * math.sin(_thetaG) _z1 = self.b - self.Zc", "# # # def get_property(self): 
# if self.units_in[0]: _units_input = self.units_in else: print('", "b, thetaG): # self.a = float(a) self.b = float(b) self.theta = float(thetaG) self.p", "to half of the circumference (thetaG = 1/2pi) may be combined together to", "self.Iy = ((self.a * self.b**3 / 8.0) * (2 * _thetaG + math.sin(2", "math.pi * r**4 / 16.0 # Second Moment of Area about y _Iz", "* ((_b - _a) / (_b + _a))**2)) + (((self.tw**3 * math.pi /", "= self.b / 2.0 - 0.50 * self.tw # Note : there is", "4.0: _C1 = 0.4829 + 0.0725 * _DD - 0.1815 * _DD**2 _C2", "_unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # # # def get_property(self):", "= 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_b /", "0.01284 * (b/a)**2 Iy = (math.pi * t * a**2 / 4.0 *", "_units_in) # def get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print(' **", "Moment of Area about the horizontal centroidal C self.Ic = self.Iy - self.area", "Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature", "_a/_b < 1.0 : _tmax = 2 * _a**2 / _b else: _tmax", "self.theta = float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Segment'", "self.Yc = 0 _Yc1 = _b + self.tw / 2.0 #------------------------------------------------- # Section", "32.0) * (3 * _a + _b)) * (1 + _K5 * ((_a", "math.sqrt(_Ixx / _A) # return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy,", "* t * b**2 / 4.0 * (b + 3*a) * (1 +", "units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0] #self.tw *= factors[0] #self.a *=", "0.07135 * r**4 _Iy1 = 0.05489 * r**4 _Iy2 = math.pi * r**4", "((a-b)/(a+b))**2) # Centroid Zc = a + t / 2.0 Yc = b", "+ 0.1279 * a/b - 0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279", "(_b / _a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw * _b *", "((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc, Yc, Iy, Zey, Iz, Zez", "self.p + self.p * self.q + self.q) / (self.p * self.q))))) # Second", 
"* (a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3 /", "- 1.8874 * _DD # else : sys.exit('error a/b > 4 or a/b", "_Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out", "_b > 0.25 and _a / _b < 1.0: _C1 = 0.5067 -", "= 0 _Yc1 = _b + self.tw / 2.0 #------------------------------------------------- # Section Properties", "= open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the circular and", "_DD = min(_DD , 1.0) if _a / _b > 0.25 and _a", "* (2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc = 0 # Second", "self.Iy = ((2.0 * self.a * self.b**3 / self.q) * ((math.gamma(3.0 / self.q)", "t * b * (b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2)", "r / (3 * math.pi) _Yc = _Zc # Second Moment of Area", "self.Zc - self.b * math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic /", "Yc2 = Yc1 Ic2 = Ic1 Iy2 = Iy1 _d = b1 +", "self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout =", "Section Heigh b : Base tw : Wall thickness Returns ---------- area: Section", "= _Ixx / _x1 # radii of gyration _ry = math.sqrt(_Iyy / _A)", "(_a + _b))**2))) self.Iy = _Iy - self.area * self.Zc**2 _K2 = 0.1349", "self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 * self.q))", "{:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout", "16.0 * (3*b + a) * (1 + K3 * ((b-a)/(b+a))**2)) # Elastic", "= self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a", "1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\" # Area _Area = math.pi", "Modulus about Mayor Axis # -------------------------------------- K4 = 0.1835 
+ 0.895 * a/b", "sys.exit() # def geometry(self, **kwargs): for key, value in kwargs.items(): _dim = find_section_dimensions(key)", "Calculate the section properties of a Hollow Semiellipse with constant wall thickness Tw.", "= units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # # # def get_property(self): #", "_b) * (1.0 + _K1 * ((_a - _b) / (_a + _b))**2))", "of Area about Minor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 *", "# Cusps will form in the perimeter at # the ends of the", "if section is symmetrical if b2 == 0: b2 = b1 A2 =", "+ self.q) / (self.p * self.q))))) self.Yc = 0 # Second Moment of", "section moduli _Zpy = 0 _Zpz = 0 # radii of gyration self.ry", "+ _b))**2)) + (((self.tw**3 * math.pi / 32.0) * (3 * _a +", "0.1279 * b/a - 0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279 *", "axis _DD = self.tw / _tmax _DD = max(_DD , 0.20) _DD =", "axis Zez : Elastic modulus about minor axis rz : Radius of gyration", "self.a**3 * self.b / self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q)", "Plastic section moduli minor axis _K4 = 0.1835 + 0.895 * (_b /", "check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self))", "(1 + _K2 * ((_b - _a) / (_b + _a))**2)) + (((self.tw**3", "0.5067 - 0.5588 * _DD + 1.3820 * _DD**2 _C2 = 0.3731 +", "0.4885 * _DD**2 _C4 = 0.0170 - 0.0079 * _DD - 0.0565 *", "self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz,", "Area about y self.Iz = ((self.a**3 * self.b / 8.0) * (2 *", "self.q)) * math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q)))", "math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area self.area = (0.50 *", "= math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy,", "Calculate the superellipse cross section properties Superellipses as a function of 
the powers", "+ b) * (1 + K3 * ((a-b)/(a+b))**2)) # Second Moment of Area", "K3 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 Iy =", "Formulas for stress, strain and strucutral matrices [W.D. Pilkey] 2.- Roark's formulas for", "self.q) / (2 * self.q)) * math.gamma((self.p + self.p * self.q + self.q)", "*= factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG =", "Centroid self.Zc = ((2.0 * _a * _K2 / math.pi) + (self.tw**2 *", "_thetaG - math.sin(2 * _thetaG) * (3.0 + 2.0 * math.sin(_thetaG)**2))) # Second", "_units_input = self.units_in else: print(' ** error input units not provided') print(' process", "math.pi * _a))) _Zc1 = _a + self.tw / 2.0 - self.Zc self.Yc", "self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *= factors[0]", "about Mayor Axis # -------------------------------------- K4 = 0.1835 + 0.895 * a/b -", "= 0.0203 + 1.8999 * _DD - 3.4356 * _DD**2 _C4 = 0.0578", "# class HollowSemiellipse: \"\"\" Calculate the section properties of a Hollow Semiellipse with", "self.units_in = _units_output self.a *= factors[0] self.b *= factors[0] self.p *= factors[0] self.q", "_DD = self.tw / _tmax _DD = max(_DD , 0.20) _DD = min(_DD", "/ 10**4) # Second Moment of Area about the horizontal centroidal C self.Ic", "Wall thickness Returns ---------- area: Section area Zc : Elastic neutral centre Yc", "0, Ic2 = 0, Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension Open", "self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from the", "centre Iy : Second moment of area about mayor axis Zey : Elastic", "(_a + _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration self.ry", "0.6608 * _DD + 1.4222 * _DD**2 _C3 = 0.0203 + 1.8999 *", "1.8874 * _DD # else : sys.exit('error a/b > 4 or a/b <", "- self.Zc _z2 = self.Zc # elastic section moduli self.Zey = min(self.Ic /", "factors = 
units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *= factors[0] self.b *= factors[0]", "Radius of gyration about minor Axis Notes ---------- Uses formulas from: 1.- Geometric", "self.p * self.q + self.q) / (self.p * self.q))))) # Centroid self.Zc =", "(_a / _b)**3)) _Yp = 0 # Plastic section moduli mayor axis self.Zpy", "for the design of unusual member cross-sections in bending [<NAME>] Examples ---------- \"\"\"", "= 2 * _b**2 / _a if self.tw > _tmax : sys.exit('error :", "# class SuperEllipse: \"\"\" Calculate the superellipse cross section properties Superellipses as a", "0.00978 * (a/b)**2 Zey = 1.3333 * t * a * (a +", "(_b + 3 * _a)) * (1 + _K2 * ((_b - _a)", "perimeter at # the ends of the mayor axis if this # maximum", "cross-sections in bending [A.J. Sadowski] Examples ---------- \"\"\" def __init__(self): # Build [WELDED", "add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse cross section properties", "0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218 +", "the design of unusual member cross-sections in bending [A.J. 
Sadowski] Examples ---------- \"\"\"", "open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse cross section", "* ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p +", "= Yc1 Ic2 = Ic1 Iy2 = Iy1 _d = b1 + b2", "_C6 / (_a / _b) + _C7 / (_a / _b)**2 + _C8", "- 3.4356 * _DD**2 _C4 = 0.0578 - 1.6666 * _DD + 2.6012", "Plastic modulus about minor axis SFz : Shape factor minor axis rz :", "self.build = 'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness", "if _a/_b < 1.0 : _tmax = 2 * _a**2 / _b else:", "Axis # -------------------------------------- _K4 = 0.1349 + 0.1279 * (_a / _b) -", "Uses formulas from: 1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\" # Area", "Modulus about Minor Axis # -------------------------------------- K4 = 0.1835 + 0.895 * b/a", "units not provided') print(' process terminated') sys.exit() # units try: _units_output = self.units_out", "Zez : Elastic modulus about minor axis Zpz : Plastic modulus about minor", "import math # # package imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import", "self.a = float(a) self.b = float(b) self.theta = 90 self.p = float(p) self.q", "_thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) # Area self.area =", "+ '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate", "_a if self.tw > _tmax : sys.exit('error : t > tmax') #------------------------------------------------- #", "0.3882 * _DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 *", "self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50", "Radius of gyration about mayor Axis Iz : Second moment of area about", "= Iy1 + Iy2 # Extreme fibre distances _x1 = a _y1 =", 
"ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average'", "self.rz = math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc, _Yc, self.Ic, _Zey,", "units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections Profiles", "* _C**2 + 0.1097 * _C**3 _K3 = 1 + 0.9929 * _C", "_Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def print_file(self,", "- _Yc _y2 = _Yc # Elastic section moduli _Sy = min(_Iyy /", "self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) +", "0.4829 + 0.0725 * _DD - 0.1815 * _DD**2 _C2 = 0.1957 -", "factors[0] self.p *= factors[0] self.q *= factors[0] if self.p <= 0 or self.q", "_Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1,", "provided') print(' program aborted') sys.exit() # def geometry(self, a, b, p=2.0, q=2.0): #", "of Area about x self.Iy = ((self.a * self.b**3 / 16.0) * (4", "Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme fibre distances _x1 = a", "= a _y1 = _d - _Yc _y2 = _Yc # Elastic section", "find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\"", "self.area = ((self.tw * math.pi / 2.0) * (_a + _b) * (1.0", "Heigh b : Base tw : Wall thickness Returns ---------- area: Section area", "_unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self): # if", "- _b) / (_a + _b))**2)) # Centroid self.Zc = ((2.0 * _a", "neutral centre Iy : Second moment of area about mayor axis Zey :", "pressure/stress] \"\"\" for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit,", "= 0, Yc2 = 0, Ic2 = 0, Iy2 = 0): \"\"\" Elliptical", "_Sx = _Ixx / _x1 # 
radii of gyration _ry = math.sqrt(_Iyy /", "= 1/2pi) may be combined together to make a hollow closed cross-section with", "---------- \"\"\" # def __init__(self): # # Build [WELDED / ROLLED] self.build =", "symmetrical if b2 == 0: b2 = b1 A2 = A1 Yc2 =", "hollow_ellipse(a, b, t): \"\"\" a b t \"\"\" # Area K1 = 0.2464", "+ math.pi * t**3 / 16.0 * (3*b + a) * (1 +", "(1 + _K4 * ((_b - _a) / (_a + _b))**2)) + (self.tw**3", "self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances", "* self.tw # Note : there is a limit on the maximum #", "(2 * _thetaG - math.sin(2 * _thetaG))) # Second Moment of Area about", "0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637 + 2.7357 *", "and elliptical segments cross section properties Parameters ---------- a : Mayor Axis b", "self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz # # def print_file(self, file_name):", "* r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy,", "((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 * (3*b + a) * (1", "/ self.p) * math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p + self.p *", "_DD - 3.4356 * _DD**2 _C4 = 0.0578 - 1.6666 * _DD +", "of Area about y _Iz = 0.03843 * r**4 _Iz1 = _Iy1 _Iz2", "to make a hollow closed cross-section with finite thickness t, e.g. 
a tube,", "+ 1.8999 * _DD - 3.4356 * _DD**2 _C4 = 0.0578 - 1.6666", "tw : Wall thickness Returns ---------- area: Section area Zc : Elastic neutral", "= 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force :", "_thetaG) self.Yc = 0 # Second Moment of Area about x self.Iy =", "b/a) Area = math.pi * t * (a + b) * (1 +", "(b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0", "self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) #", "of the powers p and q Parameters ---------- a : Mayor Axis b", "_Yc = (A1 * (Yc1 + b2) + A2 * (b2 - Yc2))", "self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self): # if self.units_in[0]: _units_input =", "from: 1.- Formulas for stress, strain and strucutral matrices [W.D. Pilkey] 2.- Roark's", "* math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0 # Second Moment of", "def units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory] force : temperature :", "ry : Radius of gyration about mayor Axis Iz : Second moment of", "Second Moment of Area about x self.Iy = ((self.a * self.b**3 / 16.0)", "of area about mayor axis Zey : Elastic modulus about mayor axis ry", "#return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz #", "self.q) / (self.p * self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 / self.q)", "b1, A1, Yc1, Ic1, Iy1, b2 = 0, A2 = 0, Yc2 =", "_Yc = _Zc # Second Moment of Area about x _Iy = 0.07135", "1.4222 * _DD**2 _C3 = 0.0203 + 1.8999 * _DD - 3.4356 *", "area: Section area Zc : Elastic neutral centre Yc : Elastic neutral centre", "* _DD + 2.6012 * _DD**2 # _C5 = 0.22410 - 0.3922 *", "units.units_module(_unit, value, _units_in) # # # def get_property(self): # if self.units_in[0]: _units_input =", "units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature : \\n", "= units.get_length_mass(_units_input, _units_output) 
self.units_in = _units_output self.a *= factors[0] self.b *= factors[0] self.p", "except AttributeError: _units_output = self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in", "_A) # return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry #", "= 0 self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length :", "self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse()", "a b t \"\"\" # Area K1 = 0.2464 + 0.002222 * (a/b", "_b) _K1 = 0.2464 + 0.002222 * ((_a / _b) + (_b /", "0.3314 * _C + 0.0136 * _C**2 + 0.1097 * _C**3 _K3 =", "/ _b) + _C3 / (_a / _b)**2 + _C4 / (_a /", "self.q = 0 self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length", "# units try: _units_output = self.units_out except AttributeError: _units_output = self.units_in self.units_out =", "# _a = self.d - 0.50 * self.tw _b = self.b / 2.0", "math.sqrt(_DD) - 1.8874 * _DD # else : sys.exit('error a/b > 4 or", "* self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from the centroid to the", "= print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout", ": Second moment of area about minor axis Zez : Elastic modulus about", "about minor axis Zpz : Plastic modulus about minor axis SFz : Shape", "gyration about mayor Axis Iz : Second moment of area about minor axis", "* ((((self.tw * _b**2 * math.pi / 4.0) * (_b + 3 *", "= ((2.0 * self.a * self.b / self.q) * ((math.gamma(1.0 / self.q) *", "* _thetaG - math.sin(4 * _thetaG))) # Second Moment of Area about y", "+ 0.1279 * a/b - 0.01284 * (a/b)**2 Iz = (math.pi * t", "_a * _K2 / math.pi) + (self.tw**2 * _K3 / (6.0 * math.pi", "value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) 
if self.units_in[0]:", "# Centroid self.Zc = ((2.0 * _a * _K2 / math.pi) + (self.tw**2", "Second Moment of Area about y self.Iz = ((self.a**3 * self.b / 8.0)", "r : radius Returns ---------- area: Section area Zc : Elastic neutral centre", "allowed in this case. # Cusps will form in the perimeter at #", "* (1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc = a + t", "0.05489 * r**4 _Iy2 = math.pi * r**4 / 16.0 # Second Moment", "b2 == 0: b2 = b1 A2 = A1 Yc2 = Yc1 Ic2", "Wikipedia Examples ---------- \"\"\" # def __init__(self): # # Build [WELDED / ROLLED]", "about minor axis Zez : Elastic modulus about minor axis Zpz : Plastic", "(_b / _a) - 0.00978 * (_b / _a)**2 self.Zpz = (0.50 *", "centroidal C self.Ic = self.Iy - self.area * self.Zc**2 # The distances from", ": there is a limit on the maximum # wall thickness allowed in", "/ _b >= 1.0 and _a / _b < 4.0: _C1 = 0.4829", "* self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0 # Second", "sectors cross section properties Parameters ---------- a : Mayor Axis b : Minor", "(a/b)**2 K3 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 Iy", "0.1279 * b/a - 0.01284 * (b/a)**2 Iy = (math.pi * t *", "= ((2.0 * _a * _K2 / math.pi) + (self.tw**2 * _K3 /", "must be provided') print(' program aborted') sys.exit() # def geometry(self, a, b, p=2.0,", "Elastic modulus about mayor axis ry : Radius of gyration about mayor Axis", "# self.a = float(a) self.b = float(b) self.theta = 90 self.p = float(p)", "Second moment of full area _Ixx = ((Ic1 + A1 * (Yc1 +", "-------------------------------------- # Let Zp be the vertical distance from the bottom # to", "2019-2021 steelpy # # Python stdlib imports import math # # package imports", "math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A) # return _A, _Yc, _x1,", "of Area about Mayor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 *", "0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279 * b/a - 0.01284 *", "+ K2 * ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 * (3*b +", "= 0.3731 + 0.1938 * _DD - 
1.4078 * _DD**2 _C3 = -0.140", "add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter", "about x self.Iy = ((2.0 * self.a * self.b**3 / self.q) * ((math.gamma(3.0", "{:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt'", "_K3 = 1 + 0.9929 * _C - 0.2287 * _C**2 - 0.2193", "+ 0.4885 * _DD**2 _C4 = 0.0170 - 0.0079 * _DD - 0.0565", "self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout", "class HollowSemiellipse: \"\"\" Calculate the section properties of a Hollow Semiellipse with constant", "+ 0.0179 * _DD + 0.4885 * _DD**2 _C4 = 0.0170 - 0.0079", "Calculate the circular and elliptical sectors cross section properties Parameters ---------- a :", "axis Zpz : Plastic modulus about minor axis SFz : Shape factor minor", "* (b/a)**2 Zez = 1.3333 * t * b * (b + 2*a)", "K3 * ((a-b)/(a+b))**2)) # Second Moment of Area about Minor Axis # --------------------------------------", "0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 Iz = (math.pi *", "_C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7 =", "_x1 # radii of gyration _ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx", "EllipticalSegment: \"\"\" Calculate the circular and elliptical segments cross section properties Parameters ----------", "self.Iy - self.area * self.Zc**2 # The distances from the centroid to the", "-0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD # else : sys.exit('error", ": temperature : gravity : [default : 9.81ms^2] ------ units [length, mass, time,", "self.b - self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG) # elastic section", "section is symmetrical if b2 == 0: b2 = b1 A2 = A1", "_DD**2 _C3 = -0.140 + 0.0179 * _DD + 0.4885 
* _DD**2 _C4", "= _d - _Yc _y2 = _Yc # Elastic section moduli _Sy =", "of Area about x self.Iy = ((self.a * self.b**3 / 8.0) * (2", "add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse cross section properties Superellipses", "self.units_in) if self.units_in[0]: pass else: print('error length unit must be provided') print(' program", "for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in)", "+ 1.3820 * _DD**2 _C2 = 0.3731 + 0.1938 * _DD - 1.4078", "else: print(' ** error input units not provided') print(' process terminated') sys.exit() #", "(self.p * self.q))))) # Second Moment of Area about y self.Iz = ((2.0", "Zey = 1.3333 * t * a * (a + 2*b) * (1", "K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus about Minor Axis", "Moment of Area about x self.Iy = ((self.a * self.b**3 / 8.0) *", ": [default : 9.81ms^2]\\n ------ units [length, mass, time, temperature, force, pressure/stress]/n \"\"\"", "Edition] 3.- Wikipedia Examples ---------- \"\"\" # def __init__(self): # # Build [WELDED", "---------- d : Section Heigh b : Base tw : Wall thickness Returns", "# Second Moment of Area about y self.Iz = ((self.a**3 * self.b /", "_Yc)**2) + (Ic2 + A2 * (_Yc - b2 + Yc2)**2)) _Iyy =", "* _DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD", "(_a * (_C1 + _C2 / (_a / _b) + _C3 / (_a", "0.1349 + 0.1279 * (_b / _a) - 0.01284 * (_b / _a)**2", "* self.q + self.q) / (self.p * self.q))) / (math.gamma((2 * self.p +", "- 0.3314 * _C + 0.0136 * _C**2 + 0.1097 * _C**3 _K3", "(b/a)**2 Iy = (math.pi * t * a**2 / 4.0 * (a +", "0.00978 * (_b / _a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw *", ": Section Heigh b : Base tw : Wall thickness Returns ---------- area:", "3.0))) #------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz =", "= _units_output self.d *= factors[0] #self.tw *= factors[0] #self.a *= factors[0] 
#self.ta *=", "+ _C6 / (_a / _b) + _C7 / (_a / _b)**2 +", "b + t / 2.0 # Second Moment of Area about Mayor Axis", "Axis # -------------------------------------- # Let Zp be the vertical distance from the bottom", "hollow closed cross-section with finite thickness t, e.g. a tube, hollow rod, pipe", "- 0.6608 * _DD + 1.4222 * _DD**2 _C3 = 0.0203 + 1.8999", "_C5 = -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6 =", "* _DD**2 _C3 = 0.0203 + 1.8999 * _DD - 3.4356 * _DD**2", "Area about the horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2", "_K2 * ((_b - _a) / (_b + _a))**2)) + (((self.tw**3 * math.pi", "distances from the centroid to the extreme fibres _y1 = self.a _z1 =", "0.895 * b/a - 0.00978 * (b/a)**2 Zez = 1.3333 * t *", "b/a - 0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279 * a/b -", "and strucutral matrices [W.D. Pilkey] 2.- Roark's formulas for stress and strain [7th", "minor axis rz : Radius of gyration about minor Axis SC : Shear", "(_Yc - b2 + Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme fibre", "2 * _thetaG))) # Centroid self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3) /", ": Plastic modulus about mayor axis SFy : Shape factor mayor axis ry", "# Second Moment of Area about y _Iz = 0.03843 * r**4 _Iz1", "(self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy / self.area)", "<= 0 or self.q <= 0: sys.exit(\"error p & q > 0\") #", "factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi)", "x self.Iy = ((self.a * self.b**3 / 16.0) * (4 * _thetaG -", "'.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a", "Ic1 Iy2 = Iy1 _d = b1 + b2 # Total cross area", "*= factors[0] self.p *= factors[0] self.q *= factors[0] if self.p <= 0 or", ": Wall thickness Returns ---------- area: Section area Zc : Elastic neutral centre", "math.sin(2 * _thetaG) * (3.0 + 
2.0 * math.sin(_thetaG)**2))) # Second Moment of", "= 0.5067 - 0.5588 * _DD + 1.3820 * _DD**2 _C2 = 0.3731", "self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return self.area,", "# ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section properties of a Hollow", "math.sin(2 * _thetaG))) # Second Moment of Area about the horizontal centroidal C", "= str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r):", "r**4 / 16.0 # Second Moment of Area about y _Iz = 0.03843", "+ 0.9929 * _C - 0.2287 * _C**2 - 0.2193 * _C**3 self.area", "(_a / _b)**2 self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi /", "of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area) #", "length unit must be provided') print(' program aborted') sys.exit() # def geometry(self, a,", "* math.pi / 2.0) * (_a + _b) * (1.0 + _K1 *", "- math.sin(2 * _thetaG))) # Second Moment of Area about the horizontal centroidal", "Mayor Axis b : Minor Axis p : q : Returns ---------- area:", "+ self.q) / (2 * self.q)) * math.gamma((self.p + self.p * self.q +", "minor Axis Notes ---------- Uses formulas from: 1.- Structural Engineering Formulas <NAME> Examples", "0.5588 * _DD + 1.3820 * _DD**2 _C2 = 0.3731 + 0.1938 *", "((Ic1 + A1 * (Yc1 + b2 - _Yc)**2) + (Ic2 + A2", "time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"]", "self.Zez = self.Iz / _y1 # plastic section moduli _Zpy = 0 _Zpz", "(_a + _b) * (1.0 + _K1 * ((_a - _b) / (_a", "0.2464 + 0.002222 * ((_a / _b) + (_b / _a)) _K2 =", "- 0.50 * self.tw _b = self.b / 2.0 - 0.50 * self.tw", "Plastic modulus about mayor axis SFy : Shape factor mayor axis ry :", "/ _b < 4.0: _C1 = 0.4829 + 0.0725 * _DD - 0.1815", "_Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self, file_name):", "* _b**2 / _a if self.tw > 
_tmax : sys.exit('error : t >", "section properties Superellipses as a function of the powers p and q Parameters", "= 0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections which are extended to", "b1 A2 = A1 Yc2 = Yc1 Ic2 = Ic1 Iy2 = Iy1", "self.Zc**2 # The distances from the centroid to the extreme fibres _y1 =", "_b) / (_a + _b))**2)) # Centroid self.Zc = ((2.0 * _a *", "_a)**2 self.Zpz = (0.50 * (((1.3333 * self.tw * _b * (_b +", "thickness allowed in this case. # Cusps will form in the perimeter at", "a _y1 = _d - _Yc _y2 = _Yc # Elastic section moduli", "* self.q + self.q) / (self.p * self.q))))) # Second Moment of Area", "math.pi / 2.0) * (_a + _b) * (1.0 + _K1 * ((_a", "_C**2 + 0.1097 * _C**3 _K3 = 1 + 0.9929 * _C -", "(3.0 * (2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc = 0 #", "wall thickness allowed in this case. # Cusps will form in the perimeter", "0.01284 * (_b / _a)**2 _K3 = 0.1349 + 0.1279 * (_a /", "* (4 * _thetaG - math.sin(4 * _thetaG))) # Second Moment of Area", "#print('Jy',_Iz / 10**4) # Second Moment of Area about the horizontal centroidal C", "self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0] #self.tw *=", "self.tw _b = self.b / 2.0 - 0.50 * self.tw # Note :", "*= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) # Area", "* _C**3 self.area = ((self.tw * math.pi / 2.0) * (_a + _b)", "stress, strain and strucutral matrices [W.D. 
Pilkey] 2.- Roark's formulas for stress and", "self.Zpz = (0.50 * (((1.3333 * self.tw * _b * (_b + 2", "#------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz", "0.01284 * (_a / _b)**2 self.Iz = 0.50 * ((((self.tw * _b**2 *", "Mayor Axis # -------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez = self.Iz", "0.50 * self.tw # Note : there is a limit on the maximum", "aborted') sys.exit() # def geometry(self, a, b, p=2.0, q=2.0): # self.a = float(a)", "# def quarterCircle(r): \"\"\" Calculate a quarter of a circle Parameters ---------- r", "self.p * self.q + self.q) / (self.p * self.q))))) # Second Moment of", "_DD # elif _a / _b >= 1.0 and _a / _b <", "Tw. The midthickness perimeter is an ellipse 0.2 < a/b < 0.50 Parameters", "* _thetaG - math.sin(2 * _thetaG)))) self.Yc = 0 # Second Moment of", "force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] for key,", "2 * _a**2 / _b else: _tmax = 2 * _b**2 / _a", "1.- Geometric properties for the design of unusual member cross-sections in bending [A.J.", "+ 0.3882 * _DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803", "-------------------------------------- K2 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 K3", "= self.a _z1 = self.b - self.Zc _z2 = self.Zc # elastic section", "_unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error", "_d - _Yc _y2 = _Yc # Elastic section moduli _Sy = min(_Iyy", "* ((_a - _b) / (_a + _b))**2))) self.Iy = _Iy - self.area", "+ _b)) * (1 + _K5 * ((_a - _b) / (_a +", "circumference (thetaG = 1/2pi) may be combined together to make a hollow closed", "self.Zpy = (4.0 * _a**2 * self.tw * (_C5 + _C6 / (_a", "# Cross-Sectional Area _C = (_a - _b) / (_a + _b) _K1", "/ _a) - 0.00978 * (_b / _a)**2 self.Zpz = (0.50 * (((1.3333", "K4 = 0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2 Zez 
=", "0 # radii of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz", "self.area = self.a * self.b * _thetaG # Centroid self.Zc = (2 *", "1.- Geometric properties for the design of unusual member cross-sections in bending [<NAME>]", "+ 0.895 * (_b / _a) - 0.00978 * (_b / _a)**2 self.Zpz", "self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3", "about mayor axis ry : Radius of gyration about mayor Axis Iz :", "_Yc _y2 = _Yc # Elastic section moduli _Sy = min(_Iyy / _y1,", "stress and strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\" # def __init__(self):", "* self.a**3 * self.b / self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 +", "Iy2 = Iy1 _d = b1 + b2 # Total cross area _A", "# def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature", "Semiellipse with constant wall thickness Tw. The midthickness perimeter is an ellipse 0.2", "* ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 * (3*b + a) *", "Iy : Second moment of area about mayor axis Zey : Elastic modulus", "_a)) * (1 + _K4 * ((_b - _a) / (_a + _b))**2))", "member cross-sections in bending [<NAME>] Examples ---------- \"\"\" def __init__(self): # Build [WELDED", "horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4)", "_Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self, file_name): check_out", "+ _C4 / (_a / _b)**3)) _Yp = 0 # Plastic section moduli", "/ self.p)) / (math.gamma((self.p + self.p * self.q + self.q) / (self.p *", "bending [A.J. 
Sadowski] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED]", "> 0.25 and _a / _b < 1.0: _C1 = 0.5067 - 0.5588", "1 + 0.9929 * _C - 0.2287 * _C**2 - 0.2193 * _C**3", "* _a * _K2 / math.pi) + (self.tw**2 * _K3 / (6.0 *", "radii of gyration _ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A)", "# Note : there is a limit on the maximum # wall thickness", "_a) / (_a + _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of", "(Yc1 + b2 - _Yc)**2) + (Ic2 + A2 * (_Yc - b2", "= open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter of", "0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540 - 0.0448 *", "/ self.area) self.rz = math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc, _Yc,", "print(' program aborted') sys.exit() # def geometry(self, a, b, p=2.0, q=2.0): # self.a", "* (_a + 3 * _b)) * (1 + _K4 * ((_a -", "must be provided') print(' program aborted') sys.exit() # def geometry(self, **kwargs): for key,", "+ 0.1279 * b/a - 0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279", "_Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out =", ", 0.20) _DD = min(_DD , 1.0) if _a / _b > 0.25", "0 self.q = 0 self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n", "midthickness perimeter is an ellipse 0.2 < a/b < 0.50 Parameters ---------- d", "1.6666 * _DD + 2.6012 * _DD**2 # _C5 = 0.22410 - 0.3922", "* math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD)", "* ((_b - _a) / (_a + _b))**2)) + (self.tw**3 / 3.0))) #-------------------------------------------------", "self.b = float(b) self.theta = float(thetaG) self.p = 0 self.q = 0 self.type", "_thetaG - math.sin(2 * _thetaG))) # Second Moment of Area about the horizontal", "+ '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: 
\"\"\" Calculate", "+ A1 * (Yc1 + b2 - _Yc)**2) + (Ic2 + A2 *", "_DD**2 _C4 = 0.0170 - 0.0079 * _DD - 0.0565 * _DD**2 #", "_a / _b >= 1.0 and _a / _b < 4.0: _C1 =", "\"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] for key, value in", "self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) #", "is exceeded. if _a/_b < 1.0 : _tmax = 2 * _a**2 /", "= 0, Ic2 = 0, Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension", "axis SFz : Shape factor minor axis rz : Radius of gyration about", "self.p) / self.p)) / (math.gamma((self.p + self.p * self.q + self.q) / (self.p", "The distances from the centroid to the extreme fibres _y1 = self.a *", "* a * (a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2) +", "(_a / _b) + _C3 / (_a / _b)**2 + _C4 / (_a", "(6.0 * _thetaG - math.sin(2 * _thetaG) * (3.0 + 2.0 * math.sin(_thetaG)**2)))", "area Zc : Elastic neutral centre Yc : Elastic neutral centre Iy :", "_K4 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a", "0.0203 + 1.8999 * _DD - 3.4356 * _DD**2 _C4 = 0.0578 -", "(3*a + b) * (1 + K3 * ((a-b)/(a+b))**2)) # Second Moment of", "2.0) * (_a + _b) * (1.0 + _K1 * ((_a - _b)", "_units_in) # # # def get_property(self): # if self.units_in[0]: _units_input = self.units_in else:", "+ 3 * _a)) * (1 + _K2 * ((_b - _a) /", "self.theta = float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Sector'", "0 or self.q <= 0: sys.exit(\"error p & q > 0\") # Area", "Minor Axis p : q : Returns ---------- area: Section area Zc :", "* _thetaG - math.sin(2 * _thetaG))) # Second Moment of Area about the", "* (((1.3333 * self.tw * _b * (_b + 2 * _a)) *", "# Total cross area _A = A1 + A2 # Centroidal C-axis of", "a, b, thetaG): # self.a = float(a) self.b = float(b) self.theta = float(thetaG)", "_C8 / (_a / _b)**3)) # Plastic section moduli minor axis _K4 =", "Zey : Elastic modulus about mayor axis ry : Radius of gyration about", "fibre distances _x1 = a _y1 = _d - _Yc _y2 = _Yc", "+ (self.tw**2 * _K3 / (6.0 * 
math.pi * _a))) _Zc1 = _a", "_C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD # else", "_ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s}", "t / 2.0 Yc = b + t / 2.0 # Second Moment", ": [mandatory]\\n temperature : \\n gravity : [default : 9.81ms^2]\\n ------ units [length,", "* _DD - 0.0565 * _DD**2 # _C5 = -0.0292 + 0.3749 *", "def geometry(self, a, b, thetaG): # self.a = float(a) self.b = float(b) self.theta", "* _DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD", "ends of the mayor axis if this # maximum is exceeded. if _a/_b", "= b1 + b2 # Total cross area _A = A1 + A2", "/ _b)**2 self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi / 4.0)", "member cross-sections in bending [A.J. Sadowski] Examples ---------- \"\"\" def __init__(self): # Build", "# Elastic Modulus about Minor Axis # -------------------------------------- K4 = 0.1835 + 0.895", "distances from the centroid to the extreme fibres _y1 = self.a * math.sin(_thetaG)", "* math.pi) # Area self.area = (0.50 * self.a * self.b * (2", "area _A = A1 + A2 # Centroidal C-axis of full section _Yc", "'.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the", "(math.gamma((2 * self.p + self.p * self.q + self.q) / (self.p * self.q)))))", "kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' # def", "program aborted') sys.exit() # def geometry(self, **kwargs): for key, value in kwargs.items(): _dim", "stdlib imports import math # # package imports #import steelpy.units.control as units #from", "= self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0] #self.tw", "math.sin(_thetaG)**2))) # Second Moment of Area about the horizontal centroidal C self.Ic =", "# The distances from the centroid to the 
extreme fibres _y1 = self.a", "_C3 = -0.140 + 0.0179 * _DD + 0.4885 * _DD**2 _C4 =", "for key, value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type =", "(_C5 + _C6 / (_a / _b) + _C7 / (_a / _b)**2", "\"second\", \"\", \"\", \"\"] for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out", "_K4 * ((_b - _a) / (_a + _b))**2)) + (self.tw**3 / 3.0)))", "self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) +", "# #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz", "plastic neutral axis _DD = self.tw / _tmax _DD = max(_DD , 0.20)", "if this # maximum is exceeded. if _a/_b < 1.0 : _tmax =", "_ry # # def hollow_ellipse(a, b, t): \"\"\" a b t \"\"\" #", "finite thickness t, e.g. a tube, hollow rod, pipe or cylindrical shell, \"\"\"", "_K3 * ((_b - _a) / (_b + _a))**2))) # Elastic Modulus about", "* self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 / self.q) * self.b /", "0.03843 * r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc,", "Warping constant Notes ---------- Uses formulas from: 1.- Formulas for stress, strain and", "Elastic Modulus about Mayor Axis # -------------------------------------- K4 = 0.1835 + 0.895 *", "((math.pow(4, 1.0 / self.q) * self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 +", "1.3820 * _DD**2 _C2 = 0.3731 + 0.1938 * _DD - 1.4078 *", "math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) # Area self.area = self.a *", "= math.pi * t * (a + b) * (1 + K1 *", "/ 32.0) * (3 * _a + _b)) * (1 + _K5 *", "_Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 = 0, A2", "kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def get_property(self): #", "# Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM /", ": Second 
moment of area about mayor axis Zey : Elastic modulus about", "= 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 K3 = 0.1349", "Radius of gyration about minor Axis SC : Shear centre Cw : Warping", "units.units_module(_unit, value, _units_in) # def get_property(self): # if self.units_in[0]: _units_input = self.units_in else:", "# Second Moment of Area about the horizontal centroidal C self.Ic = self.Iy", "# Elastic section moduli _Sy = min(_Iyy / _y1, _Iyy / _y2) _Sx", "= math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A) # return _A, _Yc,", "\"\", \"\"] for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit,", "0.1815 * _DD**2 _C2 = 0.1957 - 0.6608 * _DD + 1.4222 *", "self.tw / 2.0 - self.Zc self.Yc = 0 _Yc1 = _b + self.tw", "3.9286 * _DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 *", "0, Yc2 = 0, Ic2 = 0, Iy2 = 0): \"\"\" Elliptical Sections", "self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz,", "b2 = b1 A2 = A1 Yc2 = Yc1 Ic2 = Ic1 Iy2", "SC : Shear centre Cw : Warping constant Notes ---------- Uses formulas from:", "= (math.pi * t * a**2 / 4.0 * (a + 3*b) *", "moduli mayor axis self.Zpy = (4.0 * _a**2 * self.tw * (_C5 +", "(_C1 + _C2 / (_a / _b) + _C3 / (_a / _b)**2", "about Mayor Axis # -------------------------------------- _K4 = 0.1349 + 0.1279 * (_a /", "length unit must be provided') print(' program aborted') sys.exit() # def geometry(self, **kwargs):", "self.p *= factors[0] self.q *= factors[0] if self.p <= 0 or self.q <=", "* math.pi / 16.0) * (3 * _b + _a)) * (1 +", "of area about minor axis Zez : Elastic modulus about minor axis rz", "_Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self, file_name): check_out =", "* (Yc1 + b2 - _Yc)**2) + (Ic2 + A2 * (_Yc -", "= _Zc # Second Moment of Area about x _Iy = 0.07135 *", "* r**4 _Iy2 = math.pi * r**4 / 16.0 # Second Moment of", "Uses formulas from: 1.- Formulas for stress, strain and strucutral matrices [W.D. 
Pilkey]", "the maximum # wall thickness allowed in this case. # Cusps will form", "Parameters ---------- r : radius Returns ---------- area: Section area Zc : Elastic", "+ 3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second", "geometry(self, **kwargs): for key, value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value)", "geometry(self, a, b, thetaG): # self.a = float(a) self.b = float(b) self.theta =", "_Zez, _Zpz, _rz, _Zp # def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E}", ": q : Returns ---------- area: Section area Zc : Elastic neutral centre", "0.895 * a/b - 0.00978 * (a/b)**2 Zey = 1.3333 * t *", "/ 2.0 Yc = b + t / 2.0 # Second Moment of", "_thetaG - math.sin(2 * _thetaG)))) self.Yc = 0 # Second Moment of Area", "maximum is exceeded. if _a/_b < 1.0 : _tmax = 2 * _a**2", "Moment of Area about y self.Iz = ((self.a**3 * self.b / 8.0) *", "cross area _A = A1 + A2 # Centroidal C-axis of full section", "* (b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2) + math.pi *", "the vertical distance from the bottom # to the plastic neutral axis _DD", "# Centroid _Zc = 4 * r / (3 * math.pi) _Yc =", "about x _Iy = 0.07135 * r**4 _Iy1 = 0.05489 * r**4 _Iy2", "A2 * (b2 - Yc2)) / _A # Second moment of full area", "may be combined together to make a hollow closed cross-section with finite thickness", "cross section properties Superellipses as a function of the powers p and q", "+ (Ic2 + A2 * (_Yc - b2 + Yc2)**2)) _Iyy = Iy1", "+ 2.6012 * _DD**2 # _C5 = 0.22410 - 0.3922 * math.sqrt(_DD) +", "function of the powers p and q Parameters ---------- a : Mayor Axis", "_Zpy = 0 _Zpz = 0 # radii of gyration self.ry = math.sqrt(self.Ic", "3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second Moment", "a/b > 4 or a/b < 0.25') # Plastic neutral axis _Zp =", "_K4 = 0.1835 + 0.895 * (_b / _a) - 0.00978 * (_b", "# def hollow_ellipse(a, b, t): \"\"\" a b t \"\"\" # Area K1", "radius Returns 
---------- area: Section area Zc : Elastic neutral centre Yc :", "/ _b) + _C7 / (_a / _b)**2 + _C8 / (_a /", "the ends of the mayor axis if this # maximum is exceeded. if", "self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p", "print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a,", "((self.a * self.b**3 / 8.0) * (2 * _thetaG + math.sin(2 * _thetaG)))", "+ 0.2960 * _DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482", "gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return", "* math.sqrt(_DD) + 0.0233 * _DD # elif _a / _b >= 1.0", "* (1 + _K4 * ((_a - _b) / (_a + _b))**2)) +", "/ 2.0 - 0.50 * self.tw # Note : there is a limit", "self.area = ((2.0 * self.a * self.b / self.q) * ((math.gamma(1.0 / self.q)", "_Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 =", "exceeded. if _a/_b < 1.0 : _tmax = 2 * _a**2 / _b", "_C**2 - 0.2193 * _C**3 self.area = ((self.tw * math.pi / 2.0) *", "_units_output self.a *= factors[0] self.b *= factors[0] self.p *= factors[0] self.q *= factors[0]", "*= factors[0] self.q *= factors[0] if self.p <= 0 or self.q <= 0:", "/ 16.0) * (4 * _thetaG - math.sin(4 * _thetaG))) # Second Moment", "C-axis of full section _Yc = (A1 * (Yc1 + b2) + A2", "q=2.0): # self.a = float(a) self.b = float(b) self.theta = 90 self.p =", "__init__(self): # Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM", "0.3731 + 0.1938 * _DD - 1.4078 * _DD**2 _C3 = -0.140 +", "= math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc,", "_Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2", "t, e.g. 
a tube, hollow rod, pipe or cylindrical shell, \"\"\" # check", "/ self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p + self.p *", "kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # # # def", "= (0.50 * self.a * self.b * (2 * _thetaG - math.sin( 2", "_Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1,", "* ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 * (3*a + b) *", "(1 + K3 * ((a-b)/(a+b))**2)) # Second Moment of Area about Minor Axis", "# # Copyright (c) 2019-2021 steelpy # # Python stdlib imports import math", "= (0.50 * (((1.3333 * self.tw * _b * (_b + 2 *", "or a/b < 0.25') # Plastic neutral axis _Zp = (_a * (_C1", "axis rz : Radius of gyration about minor Axis Notes ---------- Uses formulas", "Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section properties of", ": _tmax = 2 * _a**2 / _b else: _tmax = 2 *", "* _thetaG - math.sin(2 * _thetaG) * (3.0 + 2.0 * math.sin(_thetaG)**2))) #", "from the centroid to the extreme fibres _y1 = self.a _z1 = self.b", "self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG", "Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature", "\"\", \"\"] def units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory] force :", "self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name)", "= 0.07135 * r**4 _Iy1 = 0.05489 * r**4 _Iy2 = math.pi *", "#from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections Profiles #", "Iy1 _d = b1 + b2 # Total cross area _A = A1", "((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) 
/ self.q)) / (math.gamma((self.p + self.p", "# -------------------------------------- # Let Zp be the vertical distance from the bottom #", "0.50 * self.tw _b = self.b / 2.0 - 0.50 * self.tw #", "Moment of Area about x self.Iy = ((self.a * self.b**3 / 16.0) *", "/ _b) + (_b / _a)) _K2 = 1 - 0.3314 * _C", "# Centroid self.Zc = (2 * self.b * math.sin(_thetaG)) / (3 * _thetaG)", "def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self))", "_C**3 self.area = ((self.tw * math.pi / 2.0) * (_a + _b) *", "self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force", "self.tw / 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment of Area", "= min(_DD , 1.0) if _a / _b > 0.25 and _a /", "(2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 * self.q)) * math.gamma((self.p", "check if section is symmetrical if b2 == 0: b2 = b1 A2", "= min(_Iyy / _y1, _Iyy / _y2) _Sx = _Ixx / _x1 #", "value, _units_in) # # # def get_property(self): # if self.units_in[0]: _units_input = self.units_in", "+ math.pi * t**3 / 16.0 * (3*a + b) * (1 +", "will form in the perimeter at # the ends of the mayor axis", "thickness Returns ---------- area: Section area Zc : Elastic neutral centre Yc :", "_Yc1 = _b + self.tw / 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- #", "= ((self.a * self.b**3 / 16.0) * (4 * _thetaG - math.sin(4 *", "# self.a = float(a) self.b = float(b) self.theta = float(thetaG) self.p = 0", "(4 * _thetaG - math.sin(4 * _thetaG))) # Second Moment of Area about", "modulus about mayor axis SFy : Shape factor mayor axis ry : Radius", "to the extreme fibres _y1 = self.a * math.sin(_thetaG) _z1 = self.b -", "= _Iy - self.area * self.Zc**2 
_K2 = 0.1349 + 0.1279 * (_b", "of full section _Yc = (A1 * (Yc1 + b2) + A2 *", "= 0 self.q = 0 self.type = 'Elliptical Segment' def units_output(self, **kwargs): \"\"\"", "of unusual member cross-sections in bending [A.J. Sadowski] Examples ---------- \"\"\" def __init__(self):", "combined together to make a hollow closed cross-section with finite thickness t, e.g.", "+ 0.002222 * (a/b + b/a) Area = math.pi * t * (a", "= 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 Iz = (math.pi", "about minor Axis SC : Shear centre Cw : Warping constant Notes ----------", "[mandatory]\\n force : [mandatory]\\n temperature : \\n gravity : [default : 9.81ms^2]\\n ------", "self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp #", "_Zpz = 0 # radii of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz", "Moment of Area about x _Iy = 0.07135 * r**4 _Iy1 = 0.05489", "_b) + (_b / _a)) _K2 = 1 - 0.3314 * _C +", "neutral axis _DD = self.tw / _tmax _DD = max(_DD , 0.20) _DD", "_C1 = 0.5067 - 0.5588 * _DD + 1.3820 * _DD**2 _C2 =", "the circular and elliptical segments cross section properties Parameters ---------- a : Mayor", "math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p + self.p * self.q + 3", "# Second Moment of Area about y self.Iz = ((2.0 * self.a**3 *", "Modulus about Mayor Axis # -------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez", "about mayor axis Zey : Elastic modulus about mayor axis ry : Radius", "= _Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz,", "_thetaG - math.sin(4 * _thetaG))) # Second Moment of Area about y self.Iz", "+ _b) * (1.0 + _K1 * ((_a - _b) / (_a +", "- 5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498 + 2.8763", "* (_a / _b)**2 self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi", "# Centroidal C-axis of full section _Yc = (A1 * (Yc1 + b2)", "Zez = 1.3333 * t * b * (b + 2*a) * (1", "2.0482 * _DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 *", "/ 2.0) * (_a + _b) * (1.0 + _K1 * ((_a -", 
"Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section properties", "_z2) self.Zez = self.Iz / _y1 # plastic section moduli _Zpy = 0", "* ((_a - _b) / (_a + _b))**2)) # Centroid self.Zc = ((2.0", "_tmax : sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional Area _C =", ": Shear centre Cw : Warping constant Notes ---------- Uses formulas from: 1.-", "+ b2) + A2 * (b2 - Yc2)) / _A # Second moment", "< 4.0: _C1 = 0.4829 + 0.0725 * _DD - 0.1815 * _DD**2", "Extension Open cross-sections which are extended to half of the circumference (thetaG =", "Ic2 = Ic1 Iy2 = Iy1 _d = b1 + b2 # Total", "3*b) * (1 + K2 * ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0", "\"\"\" Calculate a quarter of a circle Parameters ---------- r : radius Returns", "_Ixx = ((Ic1 + A1 * (Yc1 + b2 - _Yc)**2) + (Ic2", "((((self.tw * _b**2 * math.pi / 4.0) * (_b + 3 * _a))", "min(_DD , 1.0) if _a / _b > 0.25 and _a / _b", "# Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section", "Elliptical Sections Profiles Extension Open cross-sections which are extended to half of the", "of Area about Mayor Axis # -------------------------------------- _K4 = 0.1349 + 0.1279 *", "_dim, value) self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length", "*= factors[0] # _a = self.d - 0.50 * self.tw _b = self.b", "(_a / _b) - 0.01284 * (_a / _b)**2 _K5 = 0.1349 +", "the perimeter at # the ends of the mayor axis if this #", "self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self,", "- 0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279 * b/a - 0.01284", "_a)) * (1 + _K3 * ((_b - _a) / (_b + _a))**2)))", "_Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1,", "- self.Zc self.Yc = 0 _Yc1 = _b + self.tw / 2.0 #-------------------------------------------------", "/ 
(self.p * self.q))))) self.Yc = 0 # Second Moment of Area about", "0.00978 * (b/a)**2 Zez = 1.3333 * t * b * (b +", "* t**3 / 16.0 * (3*a + b) * (1 + K3 *", "force, pressure/stress] \"\"\" for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in =", "* (_C5 + _C6 / (_a / _b) + _C7 / (_a /", "about y _Iz = 0.03843 * r**4 _Iz1 = _Iy1 _Iz2 = _Iy2", "about mayor axis SFy : Shape factor mayor axis ry : Radius of", "# Extreme fibre distances _x1 = a _y1 = _d - _Yc _y2", "* b * (b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2) +", "temperature : gravity : [default : 9.81ms^2] ------ units [length, mass, time, temperature,", "a, b, p=2.0, q=2.0): # self.a = float(a) self.b = float(b) self.theta =", "math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 * self.p + self.p * self.q", "factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) # Area self.area", "* (a/b)**2 Zey = 1.3333 * t * a * (a + 2*b)", "check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt'", "self.b / 8.0) * (2 * _thetaG - math.sin(2 * _thetaG))) # Second", "__init__(self): # # Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress", "- Yc2)) / _A # Second moment of full area _Ixx = ((Ic1", "0.01284 * (_b / _a)**2 _Iy = ((((self.tw * _a**2 * math.pi /", "= self.units_out except AttributeError: _units_output = self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input,", "* a/b - 0.00978 * (a/b)**2 Zey = 1.3333 * t * a", "= 0 # Second Moment of Area about x self.Iy = ((2.0 *", "[7th Edition] 3.- Wikipedia Examples ---------- \"\"\" # def __init__(self): # # Build", "+ 3*b) * (1 + K2 * ((a-b)/(a+b))**2) + math.pi * t**3 /", "Yc = b + t / 2.0 # Second Moment of Area about", "thickness t, e.g. 
a tube, hollow rod, pipe or cylindrical shell, \"\"\" #", "= [\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\" Input: ======", "# _C5 = -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6", "of gyration about minor Axis Notes ---------- Uses formulas from: 1.- Geometric properties", "add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the circular", "this # maximum is exceeded. if _a/_b < 1.0 : _tmax = 2", "_ry, _Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self, file_name): check_out = print_header()", "math.pi * t**3 / 16.0 * (3*b + a) * (1 + K3", "0.2464 + 0.002222 * (a/b + b/a) Area = math.pi * t *", ": Radius of gyration about minor Axis Notes ---------- Uses formulas from: 1.-", "t \"\"\" # Area K1 = 0.2464 + 0.002222 * (a/b + b/a)", "{:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0])", "Elastic neutral centre Yc : Elastic neutral centre Iy : Second moment of", "b : Minor Axis thetaG : Angle (degrees) Returns ---------- area: Section area", "self.b * math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG - math.sin(2 * _thetaG))))", "/ 8.0) * (2 * _thetaG + math.sin(2 * _thetaG))) # Second Moment", "Area self.area = (0.50 * self.a * self.b * (2 * _thetaG -", "y self.Iz = ((2.0 * self.a**3 * self.b / self.p) * ((math.gamma(3.0 /", "4 or a/b < 0.25') # Plastic neutral axis _Zp = (_a *", "form in the perimeter at # the ends of the mayor axis if", "Axis b : Minor Axis thetaG : Angle (degrees) Returns ---------- area: Section", "-0.140 + 0.0179 * _DD + 0.4885 * _DD**2 _C4 = 0.0170 -", "math.pi) _Yc = _Zc # Second Moment of Area about x _Iy =", "= self.Iz / _y1 # plastic section moduli _Zpy = 0 _Zpz =", "= ((self.a * self.b**3 / 8.0) * (2 * _thetaG + math.sin(2 *", "self.theta, self.p, self.q)) 
check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout =", "_Iyy / _y2) _Sx = _Ixx / _x1 # radii of gyration _ry", "gravity : [default : 9.81ms^2]\\n ------ units [length, mass, time, temperature, force, pressure/stress]/n", "[W.D. Pilkey] 2.- Roark's formulas for stress and strain [7th Edition] 3.- Wikipedia", "Second Moment of Area about x self.Iy = ((2.0 * self.a * self.b**3", "/ 3.0))) #------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz", "self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n", "self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *=", "Moment of Area about Minor Axis # -------------------------------------- K2 = 0.1349 + 0.1279", "+ 0.1279 * (_a / _b) - 0.01284 * (_b / _a)**2 _Iy", "Copyright (c) 2019-2021 steelpy # # Python stdlib imports import math # #", "+ b2 - _Yc)**2) + (Ic2 + A2 * (_Yc - b2 +", "#print('Jx',self.Ic / 10**4) # The distances from the centroid to the extreme fibres", "distances _x1 = a _y1 = _d - _Yc _y2 = _Yc #", "def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature :", "a limit on the maximum # wall thickness allowed in this case. 
#", "/ _Yc1 # Plastic Modulus about Mayor Axis # -------------------------------------- # Let Zp", "_DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7", "of Area about x self.Iy = ((2.0 * self.a * self.b**3 / self.q)", "terminated') sys.exit() # units try: _units_output = self.units_out except AttributeError: _units_output = self.units_in", "/ 10**4) # The distances from the centroid to the extreme fibres _y1", "a circle Parameters ---------- r : radius Returns ---------- area: Section area Zc", "+ b2 # Total cross area _A = A1 + A2 # Centroidal", "= 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 K3 = 0.1349", "# wall thickness allowed in this case. # Cusps will form in the", "+ _K3 * ((_b - _a) / (_b + _a))**2))) # Elastic Modulus", "if self.p <= 0 or self.q <= 0: sys.exit(\"error p & q >", "* math.gamma((1.0 + self.q) / self.q)) / (math.gamma((self.p + self.p * self.q +", "_y1 = _d - _Yc _y2 = _Yc # Elastic section moduli _Sy", "+ _a)) * (1 + _K3 * ((_b - _a) / (_b +", "of Area about the horizontal centroidal C self.Ic = self.Iy - self.area *", "self.tw > _tmax : sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional Area", "0.50 * math.pi) # Area self.area = self.a * self.b * _thetaG #", "d : Section Heigh b : Base tw : Wall thickness Returns ----------", "import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- #", "Zpz : Plastic modulus about minor axis SFz : Shape factor minor axis", "_units_output = self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output", "* (_b + 3 * _a)) * (1 + _K2 * ((_b -", "_Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def", "a/b - 0.01284 * (a/b)**2 Iz = (math.pi * t * b**2 /", "float(p) self.q = float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\" 
Input:\\n", "+ _b))**2)) # Centroid self.Zc = ((2.0 * _a * _K2 / math.pi)", "= b1 A2 = A1 Yc2 = Yc1 Ic2 = Ic1 Iy2 =", "* b/a - 0.00978 * (b/a)**2 Zez = 1.3333 * t * b", "_b * (_b + 2 * _a)) * (1 + _K4 * ((_b", "Yc1 Ic2 = Ic1 Iy2 = Iy1 _d = b1 + b2 #", "self.p * self.q + self.q) / (self.p * self.q))) / (math.gamma((2 * self.p", "the horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic /", "* _b + _a)) * (1 + _K3 * ((_b - _a) /", "((_a - _b) / (_a + _b))**2))) self.Iy = _Iy - self.area *", "t * (a + b) * (1 + K1 * ((a-b)/(a+b))**2) # Centroid", "b**2 / 4.0 * (b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2)", "* _DD - 1.4078 * _DD**2 _C3 = -0.140 + 0.0179 * _DD", "self.q))) / (math.gamma((2 * self.p + self.p * self.q + self.q) / (self.p", "/ (_a / _b)**2 + _C8 / (_a / _b)**3)) # Plastic section", "* self.p + self.p * self.q + self.q) / (self.p * self.q))))) self.Yc", "Area about Mayor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * a/b", "/ (math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q))))) #", "b1 + b2 # Total cross area _A = A1 + A2 #", "value) self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length :", "# def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 = 0, A2 =", "0 # Second Moment of Area about x self.Iy = ((self.a * self.b**3", "design of unusual member cross-sections in bending [A.J. 
Sadowski] Examples ---------- \"\"\" def", "self.b *= factors[0] self.p *= factors[0] self.q *= factors[0] if self.p <= 0", "- _a) / (_a + _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius", "= self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from", "> _tmax : sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional Area _C", "* (3*b + a) * (1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus", "_Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def print_file(self, file_name):", "rz : Radius of gyration about minor Axis Notes ---------- Uses formulas from:", "/ self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) /", "_Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse()", "'Elliptical Segment' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n", "value, self.units_in) if self.units_in[0]: pass else: print('error length unit must be provided') print('", "= 0 self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length :", "16.0 # Second Moment of Area about y _Iz = 0.03843 * r**4", "K2 * ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 * (3*a + b)", "+ _C2 / (_a / _b) + _C3 / (_a / _b)**2 +", "/ _a if self.tw > _tmax : sys.exit('error : t > tmax') #-------------------------------------------------", "print('error length unit must be provided') print(' program aborted') sys.exit() # def geometry(self,", "print('ok') # class EllipticalSector: \"\"\" Calculate the circular and elliptical sectors cross section", "units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory] force : temperature : gravity", "vertical distance from the bottom # to the plastic neutral axis _DD =", "* _b * (_b + 2 * _a)) * (1 + _K4 *", "\"\"\" Calculate the circular and elliptical segments cross section properties Parameters 
---------- a", "- self.b * math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic / _z1,", "properties for the design of unusual member cross-sections in bending [<NAME>] Examples ----------", "A1 Yc2 = Yc1 Ic2 = Ic1 Iy2 = Iy1 _d = b1", "/ _x1 # radii of gyration _ry = math.sqrt(_Iyy / _A) _rx =", "Hollow Semiellipse with constant wall thickness Tw. The midthickness perimeter is an ellipse", "properties for the design of unusual member cross-sections in bending [A.J. Sadowski] Examples", "Notes ---------- Uses formulas from: 1.- Formulas for stress, strain and strucutral matrices", "+ self.p * self.q + 3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz", "1.3333 * t * a * (a + 2*b) * (1 + K4", "_C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7 =", "math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD) -", "+ K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis # -------------------------------------- K4", "K4 = 0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2 Zey =", "0.0170 - 0.0079 * _DD - 0.0565 * _DD**2 # _C5 = -0.0292", "0.1279 * (_b / _a) - 0.01284 * (_b / _a)**2 _K3 =", "_K3 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a", "= 0, Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections which", "Area about x self.Iy = ((2.0 * self.a * self.b**3 / self.q) *", "2.0 # Second Moment of Area about Mayor Axis # -------------------------------------- K2 =", "the circumference (thetaG = 1/2pi) may be combined together to make a hollow", "self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi / 4.0) * (_b", "= 1 - 0.3314 * _C + 0.0136 * _C**2 + 0.1097 *", "rod, pipe or cylindrical shell, \"\"\" # check if section is symmetrical if", "= 0 # Plastic section moduli mayor axis self.Zpy = (4.0 * _a**2", "Centroidal C-axis of full section _Yc = (A1 * (Yc1 + b2) +", "together to make a hollow closed cross-section with finite thickness t, e.g. 
a", "= 0 # Second Moment of Area about x self.Iy = ((self.a *", "axis ry : Radius of gyration about mayor Axis Iz : Second moment", "+ _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration self.ry =", "# # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E}", "Axis # -------------------------------------- K4 = 0.1835 + 0.895 * a/b - 0.00978 *", "_b) / (_a + _b))**2))) self.Iy = _Iy - self.area * self.Zc**2 _K2", "\"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build = 'welded' # Shear", "+ _K5 * ((_a - _b) / (_a + _b))**2))) self.Iy = _Iy", "1.0) if _a / _b > 0.25 and _a / _b < 1.0:", "_a)) * (1 + _K2 * ((_b - _a) / (_b + _a))**2))", "_Iy = 0.07135 * r**4 _Iy1 = 0.05489 * r**4 _Iy2 = math.pi", "self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error length unit must", "+ 0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2 _K5", "#self.a *= factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb *= factors[0] #", "Area about Mayor Axis # -------------------------------------- _K4 = 0.1349 + 0.1279 * (_a", "self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self, **kwargs): \"\"\" Input:", "value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # #", "/ (_a / _b)**3)) # Plastic section moduli minor axis _K4 = 0.1835", "= 0.2464 + 0.002222 * ((_a / _b) + (_b / _a)) _K2", "_Iy = ((((self.tw * _a**2 * math.pi / 8.0) * (_a + 3", ": Elastic modulus about minor axis Zpz : Plastic modulus about minor axis", "cross section properties Parameters ---------- a : Mayor Axis b : Minor Axis", "closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 = 0, A2 = 0, Yc2", "= self.b - self.Zc _z2 = self.Zc # elastic section moduli self.Zey =", "/ self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) / self.q)) 
/", "/ _b) - 0.01284 * (_a / _b)**2 _K5 = 0.1349 + 0.1279", "(_a + _b) _K1 = 0.2464 + 0.002222 * ((_a / _b) +", "(_a / _b)**2 _K5 = 0.1349 + 0.1279 * (_a / _b) -", "steelpy # # Python stdlib imports import math # # package imports #import", "a/b < 0.25') # Plastic neutral axis _Zp = (_a * (_C1 +", "- 0.01284 * (b/a)**2 Iy = (math.pi * t * a**2 / 4.0", "+ self.tw / 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment of", "there is a limit on the maximum # wall thickness allowed in this", "= 'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness =", "Geometric properties for the design of unusual member cross-sections in bending [A.J. Sadowski]", "math.sqrt(self.Iz / self.area) # # #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry,", "/ _A # Second moment of full area _Ixx = ((Ic1 + A1", "_ry, _Iz, _Zez, _Zpz, _rz # # def print_file(self, file_name): check_out = print_header_ellipse()", "Elastic modulus about minor axis Zpz : Plastic modulus about minor axis SFz", "0.0136 * _C**2 + 0.1097 * _C**3 _K3 = 1 + 0.9929 *", "else : sys.exit('error a/b > 4 or a/b < 0.25') # Plastic neutral", "(thetaG = 1/2pi) may be combined together to make a hollow closed cross-section", "temperature, force, pressure/stress] \"\"\" for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_in", "Plastic section moduli mayor axis self.Zpy = (4.0 * _a**2 * self.tw *", "minor axis Zez : Elastic modulus about minor axis Zpz : Plastic modulus", "/ (_b + _a))**2))) # Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey", "* ((_b - _a) / (_b + _a))**2))) # Elastic Modulus about Mayor", "/ 8.0) * (2 * _thetaG - math.sin(2 * _thetaG))) # Second Moment", "* self.q))) / (math.gamma((2 * self.p + self.p * self.q + self.q) /", "The midthickness perimeter is an ellipse 0.2 < a/b < 0.50 Parameters ----------", "= 0.1957 - 
0.6608 * _DD + 1.4222 * _DD**2 _C3 = 0.0203", "# Centroid Zc = a + t / 2.0 Yc = b +", "* (2 * _thetaG - math.sin( 2 * _thetaG))) # Centroid self.Zc =", "= 0, A2 = 0, Yc2 = 0, Ic2 = 0, Iy2 =", "about minor Axis Notes ---------- Uses formulas from: 1.- Structural Engineering Formulas <NAME>", "((2.0 * self.a * self.b**3 / self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0", "# Centroid self.Zc = ((math.pow(4, 1.0 / self.q) * self.b / (2 *", "1.4078 * _DD**2 _C3 = -0.140 + 0.0179 * _DD + 0.4885 *", "_K2 / math.pi) + (self.tw**2 * _K3 / (6.0 * math.pi * _a)))", "= math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return self.area, _Zc,", "bending [<NAME>] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build", "#------------------------------------------------- # Cross-Sectional Area _C = (_a - _b) / (_a + _b)", "# # Python stdlib imports import math # # package imports #import steelpy.units.control", "- 2.0482 * _DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286", "pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] for key, value", "- 0.0079 * _DD - 0.0565 * _DD**2 # _C5 = -0.0292 +", "print(' program aborted') sys.exit() # def geometry(self, **kwargs): for key, value in kwargs.items():", "self.q)) / (math.gamma((self.p + self.p * self.q + 3 * self.q) / (self.p", "Input: ====== length : [mandatory] force : temperature : gravity : [default :", "* ((_a / _b) + (_b / _a)) _K2 = 1 - 0.3314", "-0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540 -", "+ Iy2 # Extreme fibre distances _x1 = a _y1 = _d -", "_z1 = self.b - self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG) #", "_K2 = 0.1349 + 0.1279 * (_b / _a) - 0.01284 * (_b", "* _DD + 1.4222 * _DD**2 _C3 = 0.0203 + 1.8999 * _DD", "/ (_a + _b))**2)) # Centroid self.Zc = ((2.0 * _a * _K2", "str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class 
EllipticalSector: \"\"\"", "self.p = 0 self.q = 0 self.type = 'Elliptical Sector' def units_output(self, **kwargs):", "A2 = 0, Yc2 = 0, Ic2 = 0, Iy2 = 0): \"\"\"", ": 9.81ms^2]\\n ------ units [length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in =", "/ 16.0) * (3 * _b + _a)) * (1 + _K3 *", "= self.units_in self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d", "Base tw : Wall thickness Returns ---------- area: Section area Zc : Elastic", "* math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 * self.q)) * math.gamma((self.p +", "(math.gamma((self.p + self.p * self.q + 3 * self.q) / (self.p * self.q)))))", "= Ic1 Iy2 = Iy1 _d = b1 + b2 # Total cross", "_b >= 1.0 and _a / _b < 4.0: _C1 = 0.4829 +", "_Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s}", "self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from the centroid to the extreme", "* t * (a + b) * (1 + K1 * ((a-b)/(a+b))**2) #", "#return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz #", "= math.pi * r**2 / 4.0 # # Centroid _Zc = 4 *", "+ (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy /", "str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # def quarterCircle(r): \"\"\"", "+ _K2 * ((_b - _a) / (_b + _a))**2)) + (((self.tw**3 *", "= 0 _Zpz = 0 # radii of gyration self.ry = math.sqrt(self.Ic /", "units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # # # def get_property(self): # if", "1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498 +", "Iz = (math.pi * t * b**2 / 4.0 * (b + 3*a)", "for the design of unusual member cross-sections in bending [A.J. 
Sadowski] Examples ----------", "_b) / (_a + _b))**2)) + (((self.tw**3 * math.pi / 32.0) * (3", "math.pi / 4.0) * (_b + 3 * _a)) * (1 + _K2", "factors[0] # _a = self.d - 0.50 * self.tw _b = self.b /", "# Plastic neutral axis _Zp = (_a * (_C1 + _C2 / (_a", "0.1279 * a/b - 0.01284 * (a/b)**2 K3 = 0.1349 + 0.1279 *", "to the plastic neutral axis _DD = self.tw / _tmax _DD = max(_DD", "((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p + self.p", "self.area = (0.50 * self.a * self.b * (2 * _thetaG - math.sin(", "* _thetaG # Centroid self.Zc = (2 * self.b * math.sin(_thetaG)) / (3", "_Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\"", "AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\", \"\",", "self.a * self.b**3 / self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p)", "self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) +", ".format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout", "_Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz # #", "class EllipticalSector: \"\"\" Calculate the circular and elliptical sectors cross section properties Parameters", "self.units_out = self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0]", "in bending [<NAME>] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED]", "/ _a) - 0.01284 * (_b / _a)**2 _K3 = 0.1349 + 0.1279", "self.Ic = self.Iy - self.area * self.Zc**2 # The distances from the centroid", "**kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n 
temperature : \\n gravity", "= _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 #", "= 'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self, **kwargs):", "5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498 + 2.8763 *", "Moment of Area about y _Iz = 0.03843 * r**4 _Iz1 = _Iy1", "* r / (3 * math.pi) _Yc = _Zc # Second Moment of", "Elastic neutral centre Iy : Second moment of area about mayor axis Zey", "_Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name):", "_b)**2 self.Iz = 0.50 * ((((self.tw * _b**2 * math.pi / 4.0) *", "- math.sin(2 * _thetaG) * (3.0 + 2.0 * math.sin(_thetaG)**2))) # Second Moment", "= self.Iy / _Zc1 # self.Zez = self.Iz / _Yc1 # Plastic Modulus", "_Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1, Ic1, Iy1, b2 = 0,", "_Yc # Elastic section moduli _Sy = min(_Iyy / _y1, _Iyy / _y2)", "_b < 4.0: _C1 = 0.4829 + 0.0725 * _DD - 0.1815 *", "(_b + 2 * _a)) * (1 + _K4 * ((_b - _a)", "* ((math.gamma((2.0 + self.q) / (2 * self.q)) * math.gamma((self.p + self.p *", "- 0.1803 * _DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233", "* (_b / _a) - 0.00978 * (_b / _a)**2 self.Zpz = (0.50", "of Area about y self.Iz = ((self.a**3 * self.b / 8.0) * (2", "= -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740", "* self.a * self.b**3 / self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 +", "_DD + 1.4222 * _DD**2 _C3 = 0.0203 + 1.8999 * _DD -", "---------- area: Section area Zc : Elastic neutral centre Yc : Elastic neutral", "* math.pi) # Area self.area = self.a * self.b * _thetaG # Centroid", "self.tw # Note : there is a limit on the maximum # wall", "+ b/a) Area = math.pi * t * (a + b) * (1", "e.g. 
a tube, hollow rod, pipe or cylindrical shell, \"\"\" # check if", "# #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz,", "0\") # Area self.area = ((2.0 * self.a * self.b / self.q) *", "area _Ixx = ((Ic1 + A1 * (Yc1 + b2 - _Yc)**2) +", "at # the ends of the mayor axis if this # maximum is", "# # def get_property(self): # if self.units_in[0]: _units_input = self.units_in else: print(' **", "= ((((self.tw * _a**2 * math.pi / 8.0) * (_a + 3 *", "= open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the circular and", "_C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6 =", "Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section properties of a", "* self.b / 8.0) * (2 * _thetaG - math.sin(2 * _thetaG))) #", "full section _Yc = (A1 * (Yc1 + b2) + A2 * (b2", "# Python stdlib imports import math # # package imports #import steelpy.units.control as", "<NAME> Examples ---------- \"\"\" # Area _Area = math.pi * r**2 / 4.0", "/ 24.0) * (6.0 * _thetaG - math.sin(2 * _thetaG) * (3.0 +", "of unusual member cross-sections in bending [<NAME>] Examples ---------- \"\"\" def __init__(self): #", "3 * _a)) * (1 + _K2 * ((_b - _a) / (_b", "# def geometry(self, a, b, thetaG): # self.a = float(a) self.b = float(b)", "_rz # # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}", "# Radius of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz /", "self.Iz = ((self.a**3 * self.b / 8.0) * (2 * _thetaG - math.sin(2", "_Area = math.pi * r**2 / 4.0 # # Centroid _Zc = 4", "pass else: print('error length unit must be provided') print(' program aborted') sys.exit() #", "math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q))) / (math.gamma((2", "_A) _rx = math.sqrt(_Ixx / _A) # return _A, _Yc, _x1, _Ixx, _Sx,", "* (3 * _b + _a)) * (1 
+ _K3 * ((_b -", "file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self))", "/ (_a / _b) + _C7 / (_a / _b)**2 + _C8 /", "Area about y self.Iz = ((2.0 * self.a**3 * self.b / self.p) *", "= 0.0578 - 1.6666 * _DD + 2.6012 * _DD**2 # _C5 =", "4 * r / (3 * math.pi) _Yc = _Zc # Second Moment", "- 0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279 * a/b - 0.01284", "K2 * ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 * (3*b + a)", "Mayor Axis # -------------------------------------- K4 = 0.1835 + 0.895 * a/b - 0.00978", "_Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp # def print_file(self,", "check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0])", "- 0.01284 * (_a / _b)**2 _K5 = 0.1349 + 0.1279 * (_a", "self.a * self.b * _thetaG # Centroid self.Zc = (2 * self.b *", "math.pi * t * (a + b) * (1 + K1 * ((a-b)/(a+b))**2)", "about the horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 #print('Jx',self.Ic", "= _b + self.tw / 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second", "math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 * self.q)) * math.gamma((self.p + self.p", "float(b) self.theta = 90 self.p = float(p) self.q = float(q) self.type = 'Super", "formulas from: 1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\" # Area _Area", "_Zc = 4 * r / (3 * math.pi) _Yc = _Zc #", "- 0.0448 * math.sqrt(_DD) + 0.0233 * _DD # elif _a / _b", "# if self.units_in[0]: _units_input = self.units_in else: print(' ** error input units not", "* (3 * _a + _b)) * (1 + _K5 * ((_a -", "+ _K4 * ((_a - _b) / (_a + _b))**2)) + (((self.tw**3 *", "0.0725 * _DD - 0.1815 * _DD**2 _C2 = 0.1957 - 0.6608 *", "input 
units not provided') print(' process terminated') sys.exit() # units try: _units_output =", "of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) #", "+ K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc, Yc, Iy,", "axis Zpy : Plastic modulus about mayor axis SFy : Shape factor mayor", "= ((4.0 * self.b * math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG -", "/ _b > 0.25 and _a / _b < 1.0: _C1 = 0.5067", "in this case. # Cusps will form in the perimeter at # the", ": sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a", "= print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout =", "a function of the powers p and q Parameters ---------- a : Mayor", "Axis SC : Shear centre Cw : Warping constant Notes ---------- Uses formulas", "= self.units_in else: print(' ** error input units not provided') print(' process terminated')", "/ 16.0 * (3*a + b) * (1 + K3 * ((a-b)/(a+b))**2)) #", "= float(b) self.theta = 90 self.p = float(p) self.q = float(q) self.type =", ": Shape factor minor axis rz : Radius of gyration about minor Axis", "/ _z1, self.Ic / _z2) self.Zez = self.Iz / _y1 # plastic section", "---------- \"\"\" # Area _Area = math.pi * r**2 / 4.0 # #", "are extended to half of the circumference (thetaG = 1/2pi) may be combined", "* self.q))))) self.Yc = 0 # Second Moment of Area about x self.Iy", "_C - 0.2287 * _C**2 - 0.2193 * _C**3 self.area = ((self.tw *", "0.2193 * _C**3 self.area = ((self.tw * math.pi / 2.0) * (_a +", "formulas from: 1.- Geometric properties for the design of unusual member cross-sections in", "= 0 # radii of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz =", "Mayor Axis b : Minor Axis thetaG : Angle (degrees) Returns ---------- area:", "+ self.q) / (self.p * self.q))) / (math.gamma((2 * self.p + self.p *", "_thetaG) * 
(3.0 + 2.0 * math.sin(_thetaG)**2))) # Second Moment of Area about", "float(a) self.b = float(b) self.theta = 90 self.p = float(p) self.q = float(q)", "(b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2) + math.pi * t**3", "minor axis Zez : Elastic modulus about minor axis rz : Radius of", "*= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 *", "_a)**2 _K3 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 *", "hollow rod, pipe or cylindrical shell, \"\"\" # check if section is symmetrical", "# _C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6", "self.area) # # #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez,", "# # package imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, #", "# def print_file(self, file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw))", ": Elastic neutral centre Iy : Second moment of area about mayor axis", "axis Zey : Elastic modulus about mayor axis ry : Radius of gyration", "self.b / self.p) * ((math.gamma(3.0 / self.p) * math.gamma((1.0 + self.q) / self.q))", "steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical", "gyration about minor Axis Notes ---------- Uses formulas from: 1.- Structural Engineering Formulas", "Let Zp be the vertical distance from the bottom # to the plastic", "= 0.03843 * r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return _Area, _Zc,", "self.q + self.q) / (self.p * self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0", "float(b) self.theta = float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical", "-------------------------------------- K4 = 0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2 Zez", "print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" 
.format(self.type, self.a, self.b, self.theta))", "0.895 * (_b / _a) - 0.00978 * (_b / _a)**2 self.Zpz =", "split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w')", "/ (_a / _b) + _C3 / (_a / _b)**2 + _C4 /", "Input:\\n length : [mandatory]\\n force : [mandatory]\\n temperature : \\n gravity : [default", "the powers p and q Parameters ---------- a : Mayor Axis b :", "_a**2 * math.pi / 8.0) * (_a + 3 * _b)) * (1", "print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name)", "_Zez, _Zpz, _rz # # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E}", "10**4) # The distances from the centroid to the extreme fibres _y1 =", "(((1.3333 * self.tw * _b * (_b + 2 * _a)) * (1", "self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50", "- 0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637 + 2.7357", "* (1 + K2 * ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 *", "+ _b))**2))) self.Iy = _Iy - self.area * self.Zc**2 _K2 = 0.1349 +", "self.p = 0 self.q = 0 self.type = 'Elliptical Segment' def units_output(self, **kwargs):", "(_a + _b))**2)) # Centroid self.Zc = ((2.0 * _a * _K2 /", "((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus about Minor Axis # --------------------------------------", "else: print('error length unit must be provided') print(' program aborted') sys.exit() # def", "mayor axis self.Zpy = (4.0 * _a**2 * self.tw * (_C5 + _C6", "* self.q)) * math.gamma((self.p + self.p * self.q + self.q) / (self.p *", "thickness Tw. 
The midthickness perimeter is an ellipse 0.2 < a/b < 0.50", "self.units_in else: print(' ** error input units not provided') print(' process terminated') sys.exit()", "(6.0 * math.pi * _a))) _Zc1 = _a + self.tw / 2.0 -", "_a) - 0.00978 * (_b / _a)**2 self.Zpz = (0.50 * (((1.3333 *", "_thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area self.area = (0.50 * self.a", ": Shape factor mayor axis ry : Radius of gyration about mayor Axis", "0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637 +", "/ _a)) _K2 = 1 - 0.3314 * _C + 0.0136 * _C**2", "* _thetaG - math.sin( 2 * _thetaG))) # Centroid self.Zc = ((4.0 *", "self.a * math.sin(_thetaG) _z1 = self.b - self.Zc _z2 = self.Zc - self.b", "(self.p * self.q))))) self.Yc = 0 # Second Moment of Area about x", "of a circle Parameters ---------- r : radius Returns ---------- area: Section area", "mayor axis ry : Radius of gyration about mayor Axis Iz : Second", "* _thetaG + math.sin(2 * _thetaG))) # Second Moment of Area about y", "# #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz", "Area about x _Iy = 0.07135 * r**4 _Iy1 = 0.05489 * r**4", "gyration about minor Axis Notes ---------- Uses formulas from: 1.- Geometric properties for", "_Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a, b1, A1, Yc1,", "t**3 / 16.0 * (3*b + a) * (1 + K3 * ((b-a)/(b+a))**2))", "a/b < 0.50 Parameters ---------- d : Section Heigh b : Base tw", "*= factors[0] self.b *= factors[0] #self.tb *= factors[0] # _a = self.d -", "# self.Zez = self.Iz / _Yc1 # Plastic Modulus about Mayor Axis #", "(find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- # class", "0.2287 * _C**2 - 0.2193 * _C**3 self.area = ((self.tw * math.pi /", "self.p <= 0 or self.q <= 0: sys.exit(\"error p & q > 0\")", "p : q : Returns ---------- area: Section area Zc : Elastic neutral", "moduli _Zpy = 0 _Zpz = 0 # radii of gyration self.ry 
=", "factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG),", "extreme fibres _y1 = self.a _z1 = self.b - self.Zc _z2 = self.Zc", "_Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp # def", "length : [mandatory]\\n force : [mandatory]\\n temperature : \\n gravity : [default :", "= print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p,", "y _Iz = 0.03843 * r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return", "self.a _z1 = self.b - self.Zc _z2 = self.Zc # elastic section moduli", "- 0.01284 * (_b / _a)**2 _Iy = ((((self.tw * _a**2 * math.pi", "is symmetrical if b2 == 0: b2 = b1 A2 = A1 Yc2", "= 0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2 Zey = 1.3333", "* math.gamma((1.0 + self.p) / self.p)) / (math.gamma((3 * self.p + self.p *", "-------------------------------------- K2 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 K3", "gyration about minor Axis SC : Shear centre Cw : Warping constant Notes", "= 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2 Iy = (math.pi", "about x self.Iy = ((self.a * self.b**3 / 16.0) * (4 * _thetaG", "Second Moment of Area about x self.Iy = ((self.a * self.b**3 / 8.0)", "thetaG : Angle (degrees) Returns ---------- area: Section area Zc : Elastic neutral", "axis self.Zpy = (4.0 * _a**2 * self.tw * (_C5 + _C6 /", "* self.Zc**2 # The distances from the centroid to the extreme fibres _y1", "quarterCircle(r): \"\"\" Calculate a quarter of a circle Parameters ---------- r : radius", "for stress and strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\" # def", "factors[0] self.q *= factors[0] if self.p <= 0 or self.q <= 0: sys.exit(\"error", "K3 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 Iz =", "area about minor axis Zez : Elastic modulus about minor axis Zpz :", "= max(_DD , 0.20) _DD = min(_DD , 1.0) if _a / _b", "((2.0 * self.a * self.b / self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0", "a : 
Mayor Axis b : Minor Axis p : q : Returns", "_DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD #", "* (_b / _a)**2 _K3 = 0.1349 + 0.1279 * (_a / _b)", "def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b,", "2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic", "((self.a**3 * self.b / 24.0) * (6.0 * _thetaG - math.sin(2 * _thetaG)", "'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : [mandatory]\\n", "(_a + 3 * _b)) * (1 + _K4 * ((_a - _b)", "* (1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc,", "modulus about mayor axis ry : Radius of gyration about mayor Axis Iz", "get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n", "* _C - 0.2287 * _C**2 - 0.2193 * _C**3 self.area = ((self.tw", "self.a * self.b / self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p)", "self.p) / self.p)) / (math.gamma((3 * self.p + self.p * self.q + self.q)", "r**4 _Iz1 = _Iy1 _Iz2 = _Iy2 return _Area, _Zc, _Yc, _Iy, _Iy1,", "(_b / _a)) _K2 = 1 - 0.3314 * _C + 0.0136 *", "2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110 - 5.3864 *", "the horizontal centroidal C self.Ic = self.Iy - self.area * self.Zc**2 # The", "of gyration _ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx / _A) #", "(_a / _b) - 0.01284 * (_b / _a)**2 _Iy = ((((self.tw *", "(1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis # --------------------------------------", "b2 + Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme fibre distances _x1", "Area _Area = math.pi * r**2 / 4.0 # # Centroid _Zc =", "_x1 = a _y1 = _d - _Yc _y2 = _Yc # Elastic", "Elastic section moduli _Sy = min(_Iyy / _y1, _Iyy / _y2) _Sx =", "2.0 - self.Zc self.Yc = 0 _Yc1 = _b + self.tw / 2.0", "about mayor Axis Iz : Second moment of area about minor axis Zez", ": sys.exit('error a/b > 4 or a/b < 0.25') # 
Plastic neutral axis", "print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q))", "cross-sections which are extended to half of the circumference (thetaG = 1/2pi) may", "_y1 = self.a * math.sin(_thetaG) _z1 = self.b - self.Zc _z2 = self.Zc", "* (b + 2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3 /", "Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey = self.Iy / _Zc1 #", "* self.b * (2 * _thetaG - math.sin( 2 * _thetaG))) # Centroid", "* (3*a + b) * (1 + K3 * ((a-b)/(a+b))**2)) # Second Moment", "(_a / _b) + _C7 / (_a / _b)**2 + _C8 / (_a", "Area about Minor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * b/a", "= float(a) self.b = float(b) self.theta = 90 self.p = float(p) self.q =", "constant Notes ---------- Uses formulas from: 1.- Formulas for stress, strain and strucutral", "file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class", "section _Yc = (A1 * (Yc1 + b2) + A2 * (b2 -", "_Iyy = Iy1 + Iy2 # Extreme fibre distances _x1 = a _y1", "(1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus about", "Build [WELDED / ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM / AVERAGE]", "/ _b < 1.0: _C1 = 0.5067 - 0.5588 * _DD + 1.3820", "* (1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about Mayor Axis #", "* ((_a - _b) / (_a + _b))**2)) + (((self.tw**3 * math.pi /", "= self.Iy - self.area * self.Zc**2 # The distances from the centroid to", "\"\"\" # def __init__(self): # # Build [WELDED / ROLLED] self.build = 'welded'", "Radius of gyration self.ry = math.sqrt(self.Iy / self.area) self.rz = math.sqrt(self.Iz / self.area)", ": Returns ---------- area: Section area Zc : Elastic neutral centre Yc :", "_C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8 =", "_thetaG))) # Second Moment of Area about y self.Iz = ((self.a**3 * 
self.b", "_ry, _Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s}", "(a + 3*b) * (1 + K2 * ((a-b)/(a+b))**2) + math.pi * t**3", "_DD**2 _C3 = 0.0203 + 1.8999 * _DD - 3.4356 * _DD**2 _C4", "_Zc1 # self.Zez = self.Iz / _Yc1 # Plastic Modulus about Mayor Axis", "= self.units_in factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.a *= factors[0] self.b", "self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc = 0 # Second Moment", "a**2 / 4.0 * (a + 3*b) * (1 + K2 * ((a-b)/(a+b))**2)", "_Zpz, _rz # # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E}", "0.1349 + 0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2", "_DD - 0.1815 * _DD**2 _C2 = 0.1957 - 0.6608 * _DD +", "* b/a - 0.01284 * (b/a)**2 K3 = 0.1349 + 0.1279 * a/b", "Parameters ---------- a : Mayor Axis b : Minor Axis p : q", "self.q = 0 self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length", "(_a / _b)**2 + _C4 / (_a / _b)**3)) _Yp = 0 #", "0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2 _K5 =", "* _thetaG))) # Second Moment of Area about y self.Iz = ((self.a**3 *", "> 0\") # Area self.area = ((2.0 * self.a * self.b / self.q)", "* _DD**2 # _C5 = 0.22410 - 0.3922 * math.sqrt(_DD) + 0.2960 *", "self.q <= 0: sys.exit(\"error p & q > 0\") # Area self.area =", ": Minor Axis thetaG : Angle (degrees) Returns ---------- area: Section area Zc", "_Yc1 # Plastic Modulus about Mayor Axis # -------------------------------------- # Let Zp be", "\"\"\" Input: ====== length : [mandatory] force : temperature : gravity : [default", "* _DD # else : sys.exit('error a/b > 4 or a/b < 0.25')", "\"\"\" # check if section is symmetrical if b2 == 0: b2 =", "(_a - _b) / (_a + _b) _K1 = 0.2464 + 0.002222 *", "0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2 Zey = 1.3333 *", "_units_output) self.units_in = _units_output self.d *= factors[0] #self.tw *= 
factors[0] #self.a *= factors[0]", "0.2960 * _DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 *", "* a/b - 0.01284 * (a/b)**2 Iz = (math.pi * t * b**2", "properties Parameters ---------- a : Mayor Axis b : Minor Axis thetaG :", "A1 * (Yc1 + b2 - _Yc)**2) + (Ic2 + A2 * (_Yc", "[default : 9.81ms^2] ------ units [length, mass, time, temperature, force, pressure/stress] \"\"\" for", "- 0.2287 * _C**2 - 0.2193 * _C**3 self.area = ((self.tw * math.pi", "cylindrical shell, \"\"\" # check if section is symmetrical if b2 == 0:", "_C3 = 0.0203 + 1.8999 * _DD - 3.4356 * _DD**2 _C4 =", "modulus about minor axis rz : Radius of gyration about minor Axis Notes", "(math.pi * t * a**2 / 4.0 * (a + 3*b) * (1", ": \\n gravity : [default : 9.81ms^2]\\n ------ units [length, mass, time, temperature,", "8.0) * (2 * _thetaG - math.sin(2 * _thetaG))) # Second Moment of", "/ ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress =", "/ 4.0 # # Centroid _Zc = 4 * r / (3 *", "3.0 # Elastic Modulus about Minor Axis # -------------------------------------- K4 = 0.1835 +", "= 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8 = -0.8498", "strucutral matrices [W.D. Pilkey] 2.- Roark's formulas for stress and strain [7th Edition]", "program aborted') sys.exit() # def geometry(self, a, b, thetaG): # self.a = float(a)", "K2 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 K3 =", "axis Zez : Elastic modulus about minor axis Zpz : Plastic modulus about", "limit on the maximum # wall thickness allowed in this case. 
# Cusps", "b2) + A2 * (b2 - Yc2)) / _A # Second moment of", "_C3 / (_a / _b)**2 + _C4 / (_a / _b)**3)) _Yp =", "elif _a / _b >= 1.0 and _a / _b < 4.0: _C1", "moduli minor axis _K4 = 0.1835 + 0.895 * (_b / _a) -", "---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build = 'welded' #", "/ _A) # return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry", "/ AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\",", "file_name): check_out = print_header() check_out.append(\"{:23s} {:1.4E} {:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout =", "* math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD)", "return _Area, _Zc, _Yc, _Iy, _Iy1, _Iy2, _Iz, _Iz1, _Iz2 # def closed_cross_section(a,", ".format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout =", "/ (self.p * self.q))))) # Second Moment of Area about y self.Iz =", ">= 1.0 and _a / _b < 4.0: _C1 = 0.4829 + 0.0725", "Centroid self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3) / (3.0 * (2 *", "math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD) +", "p=2.0, q=2.0): # self.a = float(a) self.b = float(b) self.theta = 90 self.p", "Parameters ---------- d : Section Heigh b : Base tw : Wall thickness", "self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force", "Parameters ---------- a : Mayor Axis b : Minor Axis thetaG : Angle", "_y1 # plastic section moduli _Zpy = 0 _Zpz = 0 # radii", "- 1.6666 * _DD + 2.6012 * _DD**2 # _C5 = 0.22410 -", "#file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out", "factors[0] #self.tb *= factors[0] # _a = self.d - 0.50 * self.tw _b", "_thetaG))) # Centroid self.Zc = ((4.0 * 
self.b * math.sin(_thetaG)**3) / (3.0 *", "/ _b else: _tmax = 2 * _b**2 / _a if self.tw >", "Centroid self.Zc = (2 * self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc", "_DD - 1.4078 * _DD**2 _C3 = -0.140 + 0.0179 * _DD +", "= A1 Yc2 = Yc1 Ic2 = Ic1 Iy2 = Iy1 _d =", "_b))**2)) + (((self.tw**3 * math.pi / 32.0) * (3 * _a + _b))", "16.0) * (4 * _thetaG - math.sin(4 * _thetaG))) # Second Moment of", "mayor Axis Iz : Second moment of area about minor axis Zez :", "* self.q))))) # Second Moment of Area about y self.Iz = ((2.0 *", "# Area self.area = ((2.0 * self.a * self.b / self.q) * ((math.gamma(1.0", "A2 * (_Yc - b2 + Yc2)**2)) _Iyy = Iy1 + Iy2 #", "Axis Iz : Second moment of area about minor axis Zez : Elastic", "_C2 = 0.1957 - 0.6608 * _DD + 1.4222 * _DD**2 _C3 =", "* _DD**2 _C4 = 0.0578 - 1.6666 * _DD + 2.6012 * _DD**2", "- 0.0565 * _DD**2 # _C5 = -0.0292 + 0.3749 * math.sqrt(_DD) +", "+ 3.9286 * _DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874", "minor axis rz : Radius of gyration about minor Axis Notes ---------- Uses", "0.0233 * _DD # elif _a / _b >= 1.0 and _a /", "* (1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus", "Pilkey] 2.- Roark's formulas for stress and strain [7th Edition] 3.- Wikipedia Examples", "= (math.pi * t * b**2 / 4.0 * (b + 3*a) *", ": gravity : [default : 9.81ms^2] ------ units [length, mass, time, temperature, force,", "[\"\", \"\", \"second\", \"\", \"\", \"\"] for key, value in kwargs.items(): _unit =", "= 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218", "* ((a-b)/(a+b))**2) + t**3 / 3.0 # Elastic Modulus about Minor Axis #", "min(_Iyy / _y1, _Iyy / _y2) _Sx = _Ixx / _x1 # radii", "_thetaG # Centroid self.Zc = (2 * self.b * math.sin(_thetaG)) / (3 *", "is an ellipse 0.2 < a/b < 0.50 Parameters ---------- d : Section", "(2 * _thetaG - math.sin( 2 * _thetaG))) # Centroid self.Zc = ((4.0", "self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out", "+ 2.0 * 
math.sin(_thetaG)**2))) # Second Moment of Area about the horizontal centroidal", "about y self.Iz = ((self.a**3 * self.b / 24.0) * (6.0 * _thetaG", "Properties #------------------------------------------------- # Second Moment of Area about Mayor Axis # -------------------------------------- _K4", "*= factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb *=", "= 'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force :", "_K2 = 1 - 0.3314 * _C + 0.0136 * _C**2 + 0.1097", "check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta,", "- math.sin(4 * _thetaG))) # Second Moment of Area about y self.Iz =", "== 0: b2 = b1 A2 = A1 Yc2 = Yc1 Ic2 =", "Roark's formulas for stress and strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\"", "/ (3.0 * (2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc = 0", "# check if section is symmetrical if b2 == 0: b2 = b1", "* self.tw * _b * (_b + 2 * _a)) * (1 +", "Area about x self.Iy = ((self.a * self.b**3 / 16.0) * (4 *", "print(' process terminated') sys.exit() # units try: _units_output = self.units_out except AttributeError: _units_output", "_Zey, self.Ic, _ry, _Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out =", "(b/a)**2 K3 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2 Iz", "self.b**3 / self.q) * ((math.gamma(3.0 / self.q) * math.gamma((1.0 + self.p) / self.p))", "Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * a/b - 0.01284 *", "#return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz, _Zp", "2.0 * math.sin(_thetaG)**2))) # Second Moment of Area about the horizontal centroidal C", "_Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz # # def print_file(self, file_name): check_out", "extended to half of the circumference (thetaG = 1/2pi) may be combined together", "# def geometry(self, **kwargs): for key, value in 
kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self,", "units [length, mass, time, temperature, force, pressure/stress] \"\"\" for key, value in kwargs.items():", "* math.pi) _Yc = _Zc # Second Moment of Area about x _Iy", "* math.sin(_thetaG)**2))) # Second Moment of Area about the horizontal centroidal C self.Ic", "= 0 self.q = 0 self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\"", "self.b * math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic", "cross-sections in bending [<NAME>] Examples ---------- \"\"\" def __init__(self): # Build [WELDED /", "(_a / _b) - 0.01284 * (_a / _b)**2 self.Iz = 0.50 *", "about minor axis rz : Radius of gyration about minor Axis Notes ----------", "self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p + self.p * self.q", "0.1835 + 0.895 * (_b / _a) - 0.00978 * (_b / _a)**2", "*= factors[0] if self.p <= 0 or self.q <= 0: sys.exit(\"error p &", "self.Iy / _Zc1 # self.Zez = self.Iz / _Yc1 # Plastic Modulus about", "Cross-Sectional Area _C = (_a - _b) / (_a + _b) _K1 =", "_K1 = 0.2464 + 0.002222 * ((_a / _b) + (_b / _a))", "(Yc1 + b2) + A2 * (b2 - Yc2)) / _A # Second", "# maximum is exceeded. 
if _a/_b < 1.0 : _tmax = 2 *", "(3 * _a + _b)) * (1 + _K5 * ((_a - _b)", "value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse'", "moment of area about minor axis Zez : Elastic modulus about minor axis", "= 0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2 Zez = 1.3333", "- self.area * self.Zc**2 # The distances from the centroid to the extreme", "math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD) -", "= ((Ic1 + A1 * (Yc1 + b2 - _Yc)**2) + (Ic2 +", "**kwargs): \"\"\" Input: ====== length : [mandatory] force : temperature : gravity :", "= float(a) self.b = float(b) self.theta = float(thetaG) self.p = 0 self.q =", "t**3 / 3.0 # Elastic Modulus about Minor Axis # -------------------------------------- K4 =", "# -------------------------------------- K2 = 0.1349 + 0.1279 * b/a - 0.01284 * (b/a)**2", "* (Yc1 + b2) + A2 * (b2 - Yc2)) / _A #", "mayor axis Zey : Elastic modulus about mayor axis Zpy : Plastic modulus", "and _a / _b < 1.0: _C1 = 0.5067 - 0.5588 * _DD", "* _DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD", "* ((a-b)/(a+b))**2) # Centroid Zc = a + t / 2.0 Yc =", "_b)**2 + _C8 / (_a / _b)**3)) # Plastic section moduli minor axis", "r**4 _Iy1 = 0.05489 * r**4 _Iy2 = math.pi * r**4 / 16.0", "/ (self.p * self.q))))) # Centroid self.Zc = ((math.pow(4, 1.0 / self.q) *", "* _DD - 3.4356 * _DD**2 _C4 = 0.0578 - 1.6666 * _DD", "print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse cross section properties Superellipses as", "key, value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow", "# Plastic Modulus about Mayor Axis # -------------------------------------- # Let Zp be the", "[default : 9.81ms^2]\\n ------ units [length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in", "(4.0 * _a**2 * self.tw * (_C5 + _C6 / (_a / _b)", "Area self.area = self.a * self.b * _thetaG # Centroid self.Zc 
= (2", "K1 * ((a-b)/(a+b))**2) # Centroid Zc = a + t / 2.0 Yc", "/ 4.0 * (b + 3*a) * (1 + K2 * ((b-a)/(b+a))**2) +", "distance from the bottom # to the plastic neutral axis _DD = self.tw", "((_b - _a) / (_a + _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- #", "* _DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD", "+ self.p * self.q + self.q) / (self.p * self.q))))) # Centroid self.Zc", "(3 * _thetaG) self.Yc = 0 # Second Moment of Area about x", "+ self.q) / self.q)) / (math.gamma((self.p + self.p * self.q + 3 *", "_DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD #", "self.units_in[0]: _units_input = self.units_in else: print(' ** error input units not provided') print('", "* self.b**3 / 16.0) * (4 * _thetaG - math.sin(4 * _thetaG))) #", "+ 0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540 - 0.0448", "units try: _units_output = self.units_out except AttributeError: _units_output = self.units_in self.units_out = self.units_in", "* _b**2 * math.pi / 4.0) * (_b + 3 * _a)) *", "= float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs): \"\"\" Input:\\n length :", "of full area _Ixx = ((Ic1 + A1 * (Yc1 + b2 -", "open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment: \"\"\" Calculate the circular and elliptical", "_b + self.tw / 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment", "(_a / _b)**3)) # Plastic section moduli minor axis _K4 = 0.1835 +", "\"\"] def units_input(self, **kwargs): \"\"\" Input: ====== length : [mandatory] force : temperature", "section moduli minor axis _K4 = 0.1835 + 0.895 * (_b / _a)", "axis Zey : Elastic modulus about mayor axis Zpy : Plastic modulus about", "open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the circular and elliptical", "# 
radii of gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz /", "Yc2)) / _A # Second moment of full area _Ixx = ((Ic1 +", "< 1.0 : _tmax = 2 * _a**2 / _b else: _tmax =", "* _DD**2 # _C5 = -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 *", "(3*b + a) * (1 + K3 * ((b-a)/(b+a))**2)) # Elastic Modulus about", "_b) - 0.01284 * (_b / _a)**2 _Iy = ((((self.tw * _a**2 *", "(self.tw**2 * _K3 / (6.0 * math.pi * _a))) _Zc1 = _a +", ": [mandatory] force : temperature : gravity : [default : 9.81ms^2] ------ units", "_a**2 * self.tw * (_C5 + _C6 / (_a / _b) + _C7", "Second Moment of Area about the horizontal centroidal C self.Ic = self.Iy -", "- _b) / (_a + _b))**2)) + (((self.tw**3 * math.pi / 32.0) *", "* self.b**3 / 8.0) * (2 * _thetaG + math.sin(2 * _thetaG))) #", "= self.Zc - self.b * math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic", "* (1 + K2 * ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 *", "Returns ---------- area: Section area Zc : Elastic neutral centre Yc : Elastic", "24.0) * (6.0 * _thetaG - math.sin(2 * _thetaG) * (3.0 + 2.0", "sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional Area _C = (_a -", "0: sys.exit(\"error p & q > 0\") # Area self.area = ((2.0 *", "#import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- #", "# #return _Area, _Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz", "+ 3*a) * (1 + K2 * ((b-a)/(b+a))**2) + math.pi * t**3 /", "# the ends of the mayor axis if this # maximum is exceeded.", "* _C + 0.0136 * _C**2 + 0.1097 * _C**3 _K3 = 1", "K1 = 0.2464 + 0.002222 * (a/b + b/a) Area = math.pi *", "be provided') print(' program aborted') sys.exit() # def geometry(self, a, b, p=2.0, q=2.0):", "_b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration self.ry = math.sqrt(self.Iy", "thetaG): # 
self.a = float(a) self.b = float(b) self.theta = float(thetaG) self.p =", "Note : there is a limit on the maximum # wall thickness allowed", "* (6.0 * _thetaG - math.sin(2 * _thetaG) * (3.0 + 2.0 *", "+ self.p * self.q + self.q) / (self.p * self.q))))) # Second Moment", "1.8999 * _DD - 3.4356 * _DD**2 _C4 = 0.0578 - 1.6666 *", "((self.a * self.b**3 / 16.0) * (4 * _thetaG - math.sin(4 * _thetaG)))", "self.Zey = self.Iy / _Zc1 # self.Zez = self.Iz / _Yc1 # Plastic", "math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry, _Iz,", "wall thickness Tw. The midthickness perimeter is an ellipse 0.2 < a/b <", "self.type = 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force", "* _K2 / math.pi) + (self.tw**2 * _K3 / (6.0 * math.pi *", "(math.pi * t * b**2 / 4.0 * (b + 3*a) * (1", "_DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7", "(2 * _thetaG - math.sin(2 * _thetaG)))) self.Yc = 0 # Second Moment", "self.shear_stress = 'average' self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\",", "* math.gamma((self.p + self.p * self.q + self.q) / (self.p * self.q))) /", "# def geometry(self, a, b, p=2.0, q=2.0): # self.a = float(a) self.b =", "2.8763 * math.sqrt(_DD) - 1.8874 * _DD # else : sys.exit('error a/b >", "EllipticalSector: \"\"\" Calculate the circular and elliptical sectors cross section properties Parameters ----------", "t * a**2 / 4.0 * (a + 3*b) * (1 + K2", "closed cross-section with finite thickness t, e.g. 
a tube, hollow rod, pipe or", "self.Yc = 0 # Second Moment of Area about x self.Iy = ((self.a", "(a/b)**2 Iz = (math.pi * t * b**2 / 4.0 * (b +", "0.1803 * _DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 *", "b, t): \"\"\" a b t \"\"\" # Area K1 = 0.2464 +", "---------- Uses formulas from: 1.- Structural Engineering Formulas <NAME> Examples ---------- \"\"\" #", "* (a + b) * (1 + K1 * ((a-b)/(a+b))**2) # Centroid Zc", "/ 2.0 - self.Zc self.Yc = 0 _Yc1 = _b + self.tw /", "_K1 * ((_a - _b) / (_a + _b))**2)) # Centroid self.Zc =", "\"\"\" a b t \"\"\" # Area K1 = 0.2464 + 0.002222 *", "= float(b) self.theta = float(thetaG) self.p = 0 self.q = 0 self.type =", "Area = math.pi * t * (a + b) * (1 + K1", "extreme fibres _y1 = self.a * math.sin(_thetaG) _z1 = self.b - self.Zc _z2", "/ _a)**2 _Iy = ((((self.tw * _a**2 * math.pi / 8.0) * (_a", "Cw : Warping constant Notes ---------- Uses formulas from: 1.- Formulas for stress,", "_a) / (_b + _a))**2))) # Elastic Modulus about Mayor Axis # --------------------------------------", "< 1.0: _C1 = 0.5067 - 0.5588 * _DD + 1.3820 * _DD**2", "* (3.0 + 2.0 * math.sin(_thetaG)**2))) # Second Moment of Area about the", "= math.radians(self.theta) _thetaG = min(_thetaG, 0.50 * math.pi) # Area self.area = self.a", "= 'Hollow Semiellipse' # def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force", "- 0.8531 * math.sqrt(_DD) + 0.3882 * _DD _C7 = -0.1218 + 0.3563", "Section Properties #------------------------------------------------- # Second Moment of Area about Mayor Axis # --------------------------------------", "*= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG, 0.50 *", "* _DD + 1.3820 * _DD**2 _C2 = 0.3731 + 0.1938 * _DD", "_DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD) + 3.9286 * _DD _C8", "/ (_a + _b))**2)) + (self.tw**3 / 3.0))) #------------------------------------------------- # Radius of gyration", "self.q))))) self.Yc = 0 # Second Moment of Area about x 
self.Iy =", "self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name)", "with constant wall thickness Tw. The midthickness perimeter is an ellipse 0.2 <", "Yc2 = 0, Ic2 = 0, Iy2 = 0): \"\"\" Elliptical Sections Profiles", "+ _C7 / (_a / _b)**2 + _C8 / (_a / _b)**3)) #", "+ 0.1279 * (_a / _b) - 0.01284 * (_a / _b)**2 self.Iz", "Angle (degrees) Returns ---------- area: Section area Zc : Elastic neutral centre Yc", "about minor Axis Notes ---------- Uses formulas from: 1.- Geometric properties for the", "self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"] def units_input(self,", "---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate the section properties of a Hollow Semiellipse", "- self.area * self.Zc**2 _K2 = 0.1349 + 0.1279 * (_b / _a)", "# Centroid self.Zc = ((4.0 * self.b * math.sin(_thetaG)**3) / (3.0 * (2", "math.pi / 8.0) * (_a + 3 * _b)) * (1 + _K4", "if self.units_in[0]: pass else: print('error length unit must be provided') print(' program aborted')", "---------- a : Mayor Axis b : Minor Axis p : q :", "return _A, _Yc, _x1, _Ixx, _Sx, _rx, _Iyy, _Sy, _ry # # def", "---------- Uses formulas from: 1.- Formulas for stress, strain and strucutral matrices [W.D.", "# -------------------------------------- K4 = 0.1835 + 0.895 * a/b - 0.00978 * (a/b)**2", "the centroid to the extreme fibres _y1 = self.a * math.sin(_thetaG) _z1 =", "# package imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension)", "* self.q + self.q) / (self.p * self.q))))) self.Yc = 0 # Second", "_b)) * (1 + _K4 * ((_a - _b) / (_a + _b))**2))", "self.p = float(p) self.q = float(q) self.type = 'Super Ellipse' def units_output(self, **kwargs):", "_A # Second moment of full area _Ixx = ((Ic1 + A1 *", "/ 2.0 # Second 
Moment of Area about Mayor Axis # -------------------------------------- K2", "add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class SuperEllipse: \"\"\" Calculate the superellipse", "self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 + self.p) / self.p)) / (math.gamma((self.p", "Total cross area _A = A1 + A2 # Centroidal C-axis of full", "factors = units.get_length_mass(_units_input, _units_output) self.units_in = _units_output self.d *= factors[0] #self.tw *= factors[0]", "_b else: _tmax = 2 * _b**2 / _a if self.tw > _tmax", "+ Yc2)**2)) _Iyy = Iy1 + Iy2 # Extreme fibre distances _x1 =", "'.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the", "<= 0: sys.exit(\"error p & q > 0\") # Area self.area = ((2.0", "(b/a)**2 Zez = 1.3333 * t * b * (b + 2*a) *", "* (1.0 + _K1 * ((_a - _b) / (_a + _b))**2)) #", "a Hollow Semiellipse with constant wall thickness Tw. 
The midthickness perimeter is an", "be provided') print(' program aborted') sys.exit() # def geometry(self, **kwargs): for key, value", "{:1.4E}\" .format(self.type, self.d, self.tw)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout", "(a/b + b/a) Area = math.pi * t * (a + b) *", "2 * _a)) * (1 + _K4 * ((_b - _a) / (_a", "modulus about mayor axis Zpy : Plastic modulus about mayor axis SFy :", "if _a / _b > 0.25 and _a / _b < 1.0: _C1", "aborted') sys.exit() # def geometry(self, a, b, thetaG): # self.a = float(a) self.b", "0, Iy2 = 0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections which are", "= min(_thetaG, 0.50 * math.pi) # Area self.area = self.a * self.b *", "= -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540", "area about mayor axis Zey : Elastic modulus about mayor axis Zpy :", "_units_output) self.units_in = _units_output self.a *= factors[0] self.b *= factors[0] self.p *= factors[0]", "math.sin(2 * _thetaG)))) self.Yc = 0 # Second Moment of Area about x", "/ (3 * _thetaG) self.Yc = 0 # Second Moment of Area about", "# # #return self.area, self.Zc, _Yc, self.Ic, _Zey, _Zpy, _ry, self.Iz, _Zez, _Zpz,", "of area about mayor axis Zey : Elastic modulus about mayor axis Zpy", "factors[0] #self.ta *= factors[0] self.b *= factors[0] #self.tb *= factors[0] # _a =", "key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) #", "*= factors[0] #self.tw *= factors[0] #self.a *= factors[0] #self.ta *= factors[0] self.b *=", "self.b * (2 * _thetaG - math.sin( 2 * _thetaG))) # Centroid self.Zc", "_tmax = 2 * _b**2 / _a if self.tw > _tmax : sys.exit('error", "elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic / _z2) self.Zez =", "The distances from the centroid to the extreme fibres _y1 = self.a _z1", "\"\", \"second\", \"\", \"\", \"\"] def units_input(self, 
**kwargs): \"\"\" Input: ====== length :", "* math.sqrt(_DD) + 0.2960 * _DD _C6 = -0.6637 + 2.7357 * math.sqrt(_DD)", "moment of full area _Ixx = ((Ic1 + A1 * (Yc1 + b2", "K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc, Yc, Iy, Zey,", "of Area about x _Iy = 0.07135 * r**4 _Iy1 = 0.05489 *", "Second Moment of Area about Mayor Axis # -------------------------------------- _K4 = 0.1349 +", "from the bottom # to the plastic neutral axis _DD = self.tw /", "< 0.50 Parameters ---------- d : Section Heigh b : Base tw :", "axis _Zp = (_a * (_C1 + _C2 / (_a / _b) +", "/ _b)**2 + _C8 / (_a / _b)**3)) # Plastic section moduli minor", "_Yc, self.Ic, _Zey, _Zpy, _ry, _Iz, _Zez, _Zpz, _rz # # def print_file(self,", "in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' #", "*= factors[0] #self.tb *= factors[0] # _a = self.d - 0.50 * self.tw", "< 0.25') # Plastic neutral axis _Zp = (_a * (_C1 + _C2", "self.a = float(a) self.b = float(b) self.theta = float(thetaG) self.p = 0 self.q", "# radii of gyration _ry = math.sqrt(_Iyy / _A) _rx = math.sqrt(_Ixx /", "Zc = a + t / 2.0 Yc = b + t /", "add_out.close() print('ok') # class EllipticalSector: \"\"\" Calculate the circular and elliptical sectors cross", "\"\", \"second\", \"\", \"\", \"\"] for key, value in kwargs.items(): _unit = units.find_unit_case(key)", "0): \"\"\" Elliptical Sections Profiles Extension Open cross-sections which are extended to half", "_rx, _Iyy, _Sy, _ry # # def hollow_ellipse(a, b, t): \"\"\" a b", "- 0.1815 * _DD**2 _C2 = 0.1957 - 0.6608 * _DD + 1.4222", "_C4 / (_a / _b)**3)) _Yp = 0 # Plastic section moduli mayor", "+ 0.3749 * math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740 - 0.8531", "= 0.1349 + 0.1279 * (_b / _a) - 0.01284 * (_b /", "(Ic2 + A2 * (_Yc - b2 + Yc2)**2)) _Iyy = Iy1 +", "= 2 * _a**2 / _b else: _tmax = 2 * _b**2 /", "* self.a * self.b / self.q) * ((math.gamma(1.0 / self.q) * math.gamma((1.0 +", "* b**2 / 4.0 * (b + 3*a) * (1 + 
K2 *", "* _DD**2 _C4 = 0.0170 - 0.0079 * _DD - 0.0565 * _DD**2", "+ 0.002222 * ((_a / _b) + (_b / _a)) _K2 = 1", "(((self.tw**3 * math.pi / 32.0) * (3 * _a + _b)) * (1", "as a function of the powers p and q Parameters ---------- a :", "_C = (_a - _b) / (_a + _b) _K1 = 0.2464 +", "sys.exit('error a/b > 4 or a/b < 0.25') # Plastic neutral axis _Zp", "of a Hollow Semiellipse with constant wall thickness Tw. The midthickness perimeter is", "make a hollow closed cross-section with finite thickness t, e.g. a tube, hollow", "* self.q + self.q) / (self.p * self.q))))) # Centroid self.Zc = ((math.pow(4,", "sys.exit() # def geometry(self, a, b, thetaG): # self.a = float(a) self.b =", "self.Zc = (2 * self.b * math.sin(_thetaG)) / (3 * _thetaG) self.Yc =", "/ _A) _rx = math.sqrt(_Ixx / _A) # return _A, _Yc, _x1, _Ixx,", "about Mayor Axis # -------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez =", "axis SFy : Shape factor mayor axis ry : Radius of gyration about", "not provided') print(' process terminated') sys.exit() # units try: _units_output = self.units_out except", "Yc : Elastic neutral centre Iy : Second moment of area about mayor", "force : [mandatory]\\n temperature : \\n gravity : [default : 9.81ms^2]\\n ------ units", "\"\"\" # Area _Area = math.pi * r**2 / 4.0 # # Centroid", "b, p=2.0, q=2.0): # self.a = float(a) self.b = float(b) self.theta = 90", "/ 4.0 * (a + 3*b) * (1 + K2 * ((a-b)/(a+b))**2) +", "and _a / _b < 4.0: _C1 = 0.4829 + 0.0725 * _DD", "elliptical segments cross section properties Parameters ---------- a : Mayor Axis b :", ".format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout = str(file_checkout[0])", "/ _y1 # plastic section moduli _Zpy = 0 _Zpz = 0 #", "the plastic neutral axis _DD = self.tw / _tmax _DD = max(_DD ,", "'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force : 
[mandatory]\\n", "** error input units not provided') print(' process terminated') sys.exit() # units try:", "/ (math.gamma((3 * self.p + self.p * self.q + self.q) / (self.p *", "2*a) * (1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return Area,", "def geometry(self, **kwargs): for key, value in kwargs.items(): _dim = find_section_dimensions(key) get_dimension(self, _dim,", "self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of Area", "* math.cos(_thetaG) # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic /", "gyration self.ry = math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # #", "- _a) / (_b + _a))**2))) # Elastic Modulus about Mayor Axis #", "+ self.p * self.q + self.q) / (self.p * self.q))) / (math.gamma((2 *", "_Iy - self.area * self.Zc**2 _K2 = 0.1349 + 0.1279 * (_b /", "_thetaG)))) self.Yc = 0 # Second Moment of Area about x self.Iy =", "# ---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\" Calculate", "Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build = 'welded'", "= _a + self.tw / 2.0 - self.Zc self.Yc = 0 _Yc1 =", "/ (_a / _b)**2 + _C4 / (_a / _b)**3)) _Yp = 0", "_Zc1 = _a + self.tw / 2.0 - self.Zc self.Yc = 0 _Yc1", "Area about y self.Iz = ((self.a**3 * self.b / 24.0) * (6.0 *", "* self.b / (2 * math.sqrt(math.pi))) * ((math.gamma((2.0 + self.q) / (2 *", "(3 * _b + _a)) * (1 + _K3 * ((_b - _a)", "section moduli mayor axis self.Zpy = (4.0 * _a**2 * self.tw * (_C5", "= A1 + A2 # Centroidal C-axis of full section _Yc = (A1", "= ((self.a**3 * self.b / 24.0) * (6.0 * _thetaG - math.sin(2 *", "about Minor Axis # -------------------------------------- K4 = 0.1835 + 0.895 * b/a -", "_DD _C7 = -0.1218 + 0.3563 * math.sqrt(_DD) - 0.1803 * _DD _C8", "math.pi * r**2 / 4.0 # # Centroid _Zc = 4 * r", "Zey : Elastic modulus about mayor axis Zpy : Plastic modulus about mayor", "+ 0.1097 * 
_C**3 _K3 = 1 + 0.9929 * _C - 0.2287", "#file_checkout = str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out))", "self.b**3 / 16.0) * (4 * _thetaG - math.sin(4 * _thetaG))) # Second", "Calculate a quarter of a circle Parameters ---------- r : radius Returns ----------", "-------------------------------------- self.Zey = self.Iy / _Zc1 # self.Zez = self.Iz / _Yc1 #", "* r**4 _Iy1 = 0.05489 * r**4 _Iy2 = math.pi * r**4 /", "self.units_out = units.units_module(_unit, value, _units_in) # # # def get_property(self): # if self.units_in[0]:", "---------- r : radius Returns ---------- area: Section area Zc : Elastic neutral", ": Elastic modulus about minor axis rz : Radius of gyration about minor", "(1 + K4 * ((b-a)/(b+a))**2) + t**3 / 3.0 return Area, Zc, Yc,", "8.0) * (_a + 3 * _b)) * (1 + _K4 * ((_a", "section moduli _Sy = min(_Iyy / _y1, _Iyy / _y2) _Sx = _Ixx", "= self.tw / _tmax _DD = max(_DD , 0.20) _DD = min(_DD ,", "min(abs(_thetaG), 0.50 * math.pi) # Area self.area = (0.50 * self.a * self.b", "- 0.00978 * (b/a)**2 Zez = 1.3333 * t * b * (b", "axis _K4 = 0.1835 + 0.895 * (_b / _a) - 0.00978 *", "((_a / _b) + (_b / _a)) _K2 = 1 - 0.3314 *", "* a**2 / 4.0 * (a + 3*b) * (1 + K2 *", "str(file_checkout[0]) +'_check_me.txt' file_checkout = str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok')", "0.0578 - 1.6666 * _DD + 2.6012 * _DD**2 # _C5 = 0.22410", "add_out.close() print('ok') # def quarterCircle(r): \"\"\" Calculate a quarter of a circle Parameters", "A1 + A2 # Centroidal C-axis of full section _Yc = (A1 *", "b2 # Total cross area _A = A1 + A2 # Centroidal C-axis", "/ 4.0) * (_b + 3 * _a)) * (1 + _K2 *", "+ math.sin(2 * _thetaG))) # Second Moment of Area about y self.Iz =", "Axis Notes ---------- Uses formulas from: 1.- Structural Engineering Formulas <NAME> Examples ----------", "min(_thetaG, 0.50 * 
math.pi) # Area self.area = self.a * self.b * _thetaG", "gravity : [default : 9.81ms^2] ------ units [length, mass, time, temperature, force, pressure/stress]", "minor axis _K4 = 0.1835 + 0.895 * (_b / _a) - 0.00978", "+ _K4 * ((_b - _a) / (_a + _b))**2)) + (self.tw**3 /", "value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # def", "(a + 2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0", "-0.6637 + 2.7357 * math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110 -", "self.Zez = self.Iz / _Yc1 # Plastic Modulus about Mayor Axis # --------------------------------------", "* (_C1 + _C2 / (_a / _b) + _C3 / (_a /", "(3 * math.pi) _Yc = _Zc # Second Moment of Area about x", "/ _b)**3)) _Yp = 0 # Plastic section moduli mayor axis self.Zpy =", "neutral axis _Zp = (_a * (_C1 + _C2 / (_a / _b)", "= 'Elliptical Sector' def units_output(self, **kwargs): \"\"\" Input:\\n length : [mandatory]\\n force :", "= self.Zc # elastic section moduli self.Zey = min(self.Ic / _z1, self.Ic /", "= float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Sector' def", "q > 0\") # Area self.area = ((2.0 * self.a * self.b /", "1.- Formulas for stress, strain and strucutral matrices [W.D. Pilkey] 2.- Roark's formulas", "A2 = A1 Yc2 = Yc1 Ic2 = Ic1 Iy2 = Iy1 _d", "'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A'", "# Second Moment of Area about Mayor Axis # -------------------------------------- K2 = 0.1349", "/ 16.0 # Second Moment of Area about y _Iz = 0.03843 *", "in kwargs.items(): _unit = units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass", "= math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc, _Iy, _Zey, _Zpy, _ry,", "or cylindrical shell, \"\"\" # check if section is symmetrical if b2 ==", "axis if this # maximum is exceeded. 
if _a/_b < 1.0 : _tmax", "0.0179 * _DD + 0.4885 * _DD**2 _C4 = 0.0170 - 0.0079 *", "Plastic neutral axis _Zp = (_a * (_C1 + _C2 / (_a /", "self.q) / self.q)) / (math.gamma((self.p + self.p * self.q + 3 * self.q)", "factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area self.area", "self.q + self.q) / (self.p * self.q))))) self.Yc = 0 # Second Moment", "math.sqrt(_DD) + 0.0578 * _DD _C6 = 0.36740 - 0.8531 * math.sqrt(_DD) +", "= b + t / 2.0 # Second Moment of Area about Mayor", "* _DD _C8 = -0.8498 + 2.8763 * math.sqrt(_DD) - 1.8874 * _DD", "math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc,", "10**4) # Second Moment of Area about the horizontal centroidal C self.Ic =", "* _C**3 _K3 = 1 + 0.9929 * _C - 0.2287 * _C**2", "= 0.01540 - 0.0448 * math.sqrt(_DD) + 0.0233 * _DD # elif _a", "_DD**2 # _C5 = -0.0292 + 0.3749 * math.sqrt(_DD) + 0.0578 * _DD", "0.25') # Plastic neutral axis _Zp = (_a * (_C1 + _C2 /", "* r**4 / 16.0 # Second Moment of Area about y _Iz =", "* (_a / _b)**2 _K5 = 0.1349 + 0.1279 * (_a / _b)", "self.q + self.q) / (self.p * self.q))) / (math.gamma((2 * self.p + self.p", "from the centroid to the extreme fibres _y1 = self.a * math.sin(_thetaG) _z1", "the superellipse cross section properties Superellipses as a function of the powers p", "Second Moment of Area about x _Iy = 0.07135 * r**4 _Iy1 =", "# -------------------------------------- _K4 = 0.1349 + 0.1279 * (_a / _b) - 0.01284", "* r**2 / 4.0 # # Centroid _Zc = 4 * r /", "_b) + _C7 / (_a / _b)**2 + _C8 / (_a / _b)**3))", "0.50 * ((((self.tw * _b**2 * math.pi / 4.0) * (_b + 3", "_a))**2))) # Elastic Modulus about Mayor Axis # -------------------------------------- self.Zey = self.Iy /", "be provided') print(' program aborted') sys.exit() # def geometry(self, a, b, thetaG): #", "* self.tw * (_C5 + _C6 / (_a / _b) + _C7 /", "- 0.50 * self.tw # Note : there is a limit on the", "the design of unusual member 
cross-sections in bending [<NAME>] Examples ---------- \"\"\" def", "* _a**2 * self.tw * (_C5 + _C6 / (_a / _b) +", "= [\"\", \"\", \"second\", \"\", \"\", \"\"] for key, value in kwargs.items(): _unit", "if self.tw > _tmax : sys.exit('error : t > tmax') #------------------------------------------------- # Cross-Sectional", "+ _C8 / (_a / _b)**3)) # Plastic section moduli minor axis _K4", "_rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type,", "* self.q + 3 * self.q) / (self.p * self.q))))) #print('Jy',_Iz / 10**4)", "imports import math # # package imports #import steelpy.units.control as units #from steelpy.sectionproperty.shapes.iomodule", "+ self.p * self.q + self.q) / (self.p * self.q))))) self.Yc = 0", "self.q))))) # Second Moment of Area about y self.Iz = ((2.0 * self.a**3", "get_dimension) # ---------------------------------------- # Elliptical Sections Profiles # ---------------------------------------- # class HollowSemiellipse: \"\"\"", "((4.0 * self.b * math.sin(_thetaG)**3) / (3.0 * (2 * _thetaG - math.sin(2", "SuperEllipse: \"\"\" Calculate the superellipse cross section properties Superellipses as a function of", "_Zc # Second Moment of Area about x _Iy = 0.07135 * r**4", "= ((self.tw * math.pi / 2.0) * (_a + _b) * (1.0 +", "/ _z2) self.Zez = self.Iz / _y1 # plastic section moduli _Zpy =", "_a))) _Zc1 = _a + self.tw / 2.0 - self.Zc self.Yc = 0", "= 'average' self.compactness = 'N/A' self.units_in = [\"\", \"\", \"second\", \"\", \"\", \"\"]", "------ units [length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\",", "_C + 0.0136 * _C**2 + 0.1097 * _C**3 _K3 = 1 +", "math.pi / 32.0) * (3 * _a + _b)) * (1 + _K5", "[A.J. 
Sadowski] Examples ---------- \"\"\" def __init__(self): # Build [WELDED / ROLLED] self.build", "* self.p + self.p * self.q + self.q) / (self.p * self.q))))) #", "_Iz, _Zez, _Zpz, _rz # # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s}", "q Parameters ---------- a : Mayor Axis b : Minor Axis p :", "0.1835 + 0.895 * b/a - 0.00978 * (b/a)**2 Zez = 1.3333 *", "factors[0] if self.p <= 0 or self.q <= 0: sys.exit(\"error p & q", "_y1 = self.a _z1 = self.b - self.Zc _z2 = self.Zc # elastic", "= 0.2464 + 0.002222 * (a/b + b/a) Area = math.pi * t", "+ t / 2.0 Yc = b + t / 2.0 # Second", "check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type, self.a, self.b, self.theta)) check_out.extend(print_properties(self)) #file_checkout", "elliptical sectors cross section properties Parameters ---------- a : Mayor Axis b :", "self.b * _thetaG # Centroid self.Zc = (2 * self.b * math.sin(_thetaG)) /", "[MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in = [\"\", \"\",", "shell, \"\"\" # check if section is symmetrical if b2 == 0: b2", "math.sin(_thetaG) _z1 = self.b - self.Zc _z2 = self.Zc - self.b * math.cos(_thetaG)", "_C4 = 0.0578 - 1.6666 * _DD + 2.6012 * _DD**2 # _C5", "(1.0 + _K1 * ((_a - _b) / (_a + _b))**2)) # Centroid", "====== length : [mandatory] force : temperature : gravity : [default : 9.81ms^2]", "# -------------------------------------- K2 = 0.1349 + 0.1279 * a/b - 0.01284 * (a/b)**2", "_Iz, _Zez, _Zpz, _rz # def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E}", "+ self.q) / (self.p * self.q))))) # Second Moment of Area about y", "* _a))) _Zc1 = _a + self.tw / 2.0 - self.Zc self.Yc =", "(1 + K2 * ((b-a)/(b+a))**2) + math.pi * t**3 / 16.0 * (3*b", "self.area * self.Zc**2 _K2 = 0.1349 + 0.1279 * (_b / _a) -", "/ (_a + _b))**2)) + (((self.tw**3 * math.pi / 32.0) * (3 *", "4.0 * (a + 3*b) * (1 + 
K2 * ((a-b)/(a+b))**2) + math.pi", "moduli _Sy = min(_Iyy / _y1, _Iyy / _y2) _Sx = _Ixx /", ": Radius of gyration about mayor Axis Iz : Second moment of area", "_a))**2)) + (((self.tw**3 * math.pi / 16.0) * (3 * _b + _a))", "Zpy : Plastic modulus about mayor axis SFy : Shape factor mayor axis", "self.units_in = _units_output self.d *= factors[0] #self.tw *= factors[0] #self.a *= factors[0] #self.ta", "self.b = float(b) self.theta = 90 self.p = float(p) self.q = float(q) self.type", "Moment of Area about y self.Iz = ((2.0 * self.a**3 * self.b /", "_dim = find_section_dimensions(key) get_dimension(self, _dim, value) self.type = 'Hollow Semiellipse' # def units_output(self,", "t * b**2 / 4.0 * (b + 3*a) * (1 + K2", "= -0.140 + 0.0179 * _DD + 0.4885 * _DD**2 _C4 = 0.0170", "as units #from steelpy.sectionproperty.shapes.iomodule import (find_section_dimensions, # get_dimension) # ---------------------------------------- # Elliptical Sections", "rz : Radius of gyration about minor Axis SC : Shear centre Cw", "+ 2*b) * (1 + K4 * ((a-b)/(a+b))**2) + t**3 / 3.0 #", ": Base tw : Wall thickness Returns ---------- area: Section area Zc :", "* (2 * _thetaG + math.sin(2 * _thetaG))) # Second Moment of Area", "_a / _b < 1.0: _C1 = 0.5067 - 0.5588 * _DD +", "geometry(self, a, b, p=2.0, q=2.0): # self.a = float(a) self.b = float(b) self.theta", "math.sqrt(_DD) - 2.0482 * _DD _C7 = 1.52110 - 5.3864 * math.sqrt(_DD) +", ": 9.81ms^2] ------ units [length, mass, time, temperature, force, pressure/stress] \"\"\" for key,", "0 _Yc1 = _b + self.tw / 2.0 #------------------------------------------------- # Section Properties #-------------------------------------------------", "_C**3 _K3 = 1 + 0.9929 * _C - 0.2287 * _C**2 -", "factor mayor axis ry : Radius of gyration about mayor Axis Iz :", "= str(file_name) + '.txt' add_out = open(file_checkout,'w') add_out.write(\"\".join(check_out)) add_out.close() print('ok') # class EllipticalSegment:", "key, value in kwargs.items(): _unit = 
units.find_unit_case(key) self.units_in = units.units_module(_unit, value, self.units_in) if", "units [length, mass, time, temperature, force, pressure/stress]/n \"\"\" _units_in = [\"\", \"\", \"second\",", "/ 2.0 #------------------------------------------------- # Section Properties #------------------------------------------------- # Second Moment of Area about", "self.q))))) #print('Jy',_Iz / 10**4) # Second Moment of Area about the horizontal centroidal", "_y2 = _Yc # Elastic section moduli _Sy = min(_Iyy / _y1, _Iyy", "0.0448 * math.sqrt(_DD) + 0.0233 * _DD # elif _a / _b >=", "* t * a**2 / 4.0 * (a + 3*b) * (1 +", "superellipse cross section properties Superellipses as a function of the powers p and", "/ self.p)) / (math.gamma((3 * self.p + self.p * self.q + self.q) /", "[WELDED / ROLLED] self.build = 'welded' # Shear Stress [MAXIMUM / AVERAGE] self.shear_stress", "self.Iy = ((self.a * self.b**3 / 16.0) * (4 * _thetaG - math.sin(4", "the centroid to the extreme fibres _y1 = self.a _z1 = self.b -", "_z2 = self.Zc - self.b * math.cos(_thetaG) # elastic section moduli self.Zey =", "-------------------------------------- _K4 = 0.1349 + 0.1279 * (_a / _b) - 0.01284 *", "factors[0] self.p *= factors[0] self.q *= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(_thetaG,", "_y2) _Sx = _Ixx / _x1 # radii of gyration _ry = math.sqrt(_Iyy", "((_a - _b) / (_a + _b))**2)) # Centroid self.Zc = ((2.0 *", "math.pi) + (self.tw**2 * _K3 / (6.0 * math.pi * _a))) _Zc1 =", "section properties of a Hollow Semiellipse with constant wall thickness Tw. 
The midthickness", "((self.tw * math.pi / 2.0) * (_a + _b) * (1.0 + _K1", "_DD - 0.0565 * _DD**2 # _C5 = -0.0292 + 0.3749 * math.sqrt(_DD)", "perimeter is an ellipse 0.2 < a/b < 0.50 Parameters ---------- d :", "min(self.Ic / _z1, self.Ic / _z2) self.Zez = self.Iz / _y1 # plastic", "/ _b)**2 _K5 = 0.1349 + 0.1279 * (_a / _b) - 0.01284", "mayor axis Zpy : Plastic modulus about mayor axis SFy : Shape factor", "in kwargs.items(): _unit = units.find_unit_case(key) self.units_out = units.units_module(_unit, value, _units_in) # # #", "- 0.5588 * _DD + 1.3820 * _DD**2 _C2 = 0.3731 + 0.1938", "minor axis SFz : Shape factor minor axis rz : Radius of gyration", "/ self.area) self.rz = math.sqrt(self.Iz / self.area) # #return self.area, _Zc, _Yc, _Iy,", "and elliptical sectors cross section properties Parameters ---------- a : Mayor Axis b", "\"\", \"\", \"\"] for key, value in kwargs.items(): _unit = units.find_unit_case(key) self.units_out =", "- _b) / (_a + _b) _K1 = 0.2464 + 0.002222 * ((_a", ": Elastic neutral centre Yc : Elastic neutral centre Iy : Second moment", "_b)**2 + _C4 / (_a / _b)**3)) _Yp = 0 # Plastic section", "+ A2 # Centroidal C-axis of full section _Yc = (A1 * (Yc1", "strain [7th Edition] 3.- Wikipedia Examples ---------- \"\"\" # def __init__(self): # #", "0.1279 * (_a / _b) - 0.01284 * (_b / _a)**2 _Iy =", "Second Moment of Area about Mayor Axis # -------------------------------------- K2 = 0.1349 +", "half of the circumference (thetaG = 1/2pi) may be combined together to make", "a + t / 2.0 Yc = b + t / 2.0 #", "+ K2 * ((a-b)/(a+b))**2) + math.pi * t**3 / 16.0 * (3*a +", "/ _y1, _Iyy / _y2) _Sx = _Ixx / _x1 # radii of", "* self.a * self.b * (2 * _thetaG - math.sin( 2 * _thetaG)))", "the section properties of a Hollow Semiellipse with constant wall thickness Tw. The", "+ _C3 / (_a / _b)**2 + _C4 / (_a / _b)**3)) _Yp", "formulas from: 1.- Formulas for stress, strain and strucutral matrices [W.D. 
Pilkey] 2.-", "*= factors[0] _thetaG = math.radians(self.theta) _thetaG = min(abs(_thetaG), 0.50 * math.pi) # Area", "def print_file(self, file_name): check_out = print_header_ellipse() check_out.append(\"{:23s} {:1.4E} {:1.4E} {:1.4E} {:1.4E} {:1.4E}\" .format(self.type,", "Shear Stress [MAXIMUM / AVERAGE] self.shear_stress = 'average' self.compactness = 'N/A' self.units_in =", "& q > 0\") # Area self.area = ((2.0 * self.a * self.b", "self.Zc _z2 = self.Zc # elastic section moduli self.Zey = min(self.Ic / _z1,", "Shape factor mayor axis ry : Radius of gyration about mayor Axis Iz", "self.area * self.Zc**2 #print('Jx',self.Ic / 10**4) # The distances from the centroid to", "provided') print(' program aborted') sys.exit() # def geometry(self, a, b, thetaG): # self.a", "float(thetaG) self.p = 0 self.q = 0 self.type = 'Elliptical Segment' def units_output(self,", "- 0.01284 * (_b / _a)**2 _K3 = 0.1349 + 0.1279 * (_a", "= math.sqrt(self.Ic / self.area) self.rz = math.sqrt(self.Iz / self.area) # # #return self.area,", "about Mayor Axis # -------------------------------------- K2 = 0.1349 + 0.1279 * a/b -", "= units.units_module(_unit, value, _units_in) # # # def get_property(self): # if self.units_in[0]: _units_input", "_b)**3)) # Plastic section moduli minor axis _K4 = 0.1835 + 0.895 *", "_Iy2 = math.pi * r**4 / 16.0 # Second Moment of Area about", ": [default : 9.81ms^2] ------ units [length, mass, time, temperature, force, pressure/stress] \"\"\"", "= units.units_module(_unit, value, self.units_in) if self.units_in[0]: pass else: print('error length unit must be", "/ _Zc1 # self.Zez = self.Iz / _Yc1 # Plastic Modulus about Mayor", "* math.sqrt(_DD) - 0.1803 * _DD _C8 = 0.01540 - 0.0448 * math.sqrt(_DD)", "= math.sqrt(self.Iz / self.area) # #return _Area, _Zc, _Yc, _Iy, _Zey, self.Ic, _ry,", "{:1.4E}\" .format(self.type, self.a, self.b, self.theta, self.p, self.q)) check_out.extend(print_properties(self)) #file_checkout = split_file_name(file_name) #file_checkout =", 
"on the maximum # wall thickness allowed in this case. # Cusps will", "t / 2.0 # Second Moment of Area about Mayor Axis # --------------------------------------", "b) * (1 + K3 * ((a-b)/(a+b))**2)) # Second Moment of Area about" ]
[ "val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\"", "oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a", "print results info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> |", "= [] range_tuples = [] # we fill the arrays and use the", "from gauss-optimizer-value to variable for key in knobs: variables.append(key) # values in knobs", "oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf):", "wf.execution_strategy[\"knobs\"] # we create a list of variable names and a list of", "variable for key in knobs: variables.append(key) # values in knobs might come unordered,", "variables): \"\"\" this is the function we call and that returns a value", "= opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function", "array for idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf,", "new experiment to run in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"]", "fill the arrays and use the index to map from gauss-optimizer-value to variable", "and variables in their array for idx, val in enumerate(variables): knob_object[val] = opti_values[idx]", "from skopt import gp_minimize from oeda.log import * from oeda.rtxlib.execution import experimentFunction from", "strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = 
wf.execution_strategy[\"optimizer_iterations\"]", "knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function we call and", "Optimizer | \" + acquisition_method, Fore.CYAN) # we look at the ranges the", "we fill the arrays and use the index to map from gauss-optimizer-value to", "{} # create the knobObject based on the position of the opti_values and", "| \" + acquisition_method, Fore.CYAN) # we look at the ranges the user", "recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a new experiment to run", "from a variable \"\"\" knob_object = {} # create the knobObject based on", "self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments", "knobs = wf.execution_strategy[\"knobs\"] # we create a list of variable names and a", "callback to execute # it uses the return value (it tries to minimize", "a list of knob (from,to) variables = [] range_tuples = [] # we", "| Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) #", "= [] # we fill the arrays and use the index to map", "gauss-optimizer-value to variable for key in knobs: variables.append(key) # values in knobs might", "them to avoid dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value =", "Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def", "min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl)", "knob (from,to) variables = [] range_tuples = [] # we fill the arrays", "+ acquisition_method, Fore.CYAN) # we look 
at the ranges the user has specified", "returns a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\",", "value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) #", "tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give the minimization function a callback", "in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] =", "# values in knobs might come unordered, so sort them to avoid dimension", "= wf.execution_strategy[\"knobs\"] # we create a list of variable names and a list", "and a list of knob (from,to) variables = [] range_tuples = [] #", "from colorama import Fore from skopt import gp_minimize from oeda.log import * from", "# we give the minimization function a callback to execute # it uses", "knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values,", "* from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs", "\" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables,", "skopt import gp_minimize from oeda.log import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy", "variables = [] range_tuples = [] # we fill the arrays and use", "\"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a 
new", "knob values from a variable \"\"\" knob_object = {} # create the knobObject", "of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value,", "has specified in the knobs knobs = wf.execution_strategy[\"knobs\"] # we create a list", "select new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values:", "is the function we call and that returns a value for optimization \"\"\"", "a new experiment to run in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"]", "dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl", "we give the minimization function a callback to execute # it uses the", "done, print results info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\">", "| Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun", "the arrays and use the index to map from gauss-optimizer-value to variable for", "gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done,", "optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a", "info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values", "info(\"> OptimalResult | 
Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" +", "knob_object) # create a new experiment to run in execution exp = dict()", "of knob (from,to) variables = [] range_tuples = [] # we fill the", "range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results info(\">\") info(\"> OptimalResult", "avoid dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1]))", "ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"]", "opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results info(\">\")", "for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create", "import gp_minimize from oeda.log import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import", "execute # it uses the return value (it tries to minimize it) to", "import Fore from skopt import gp_minimize from oeda.log import * from oeda.rtxlib.execution import", "that returns a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in", "wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) # we look at the", "results info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result:", "ranges the user 
has specified in the knobs knobs = wf.execution_strategy[\"knobs\"] # we", "applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN)", "a variable \"\"\" knob_object = {} # create the knobObject based on the", "knob_object = {} # create the knobObject based on the position of the", "+ str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf)", "exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object wf.setup_stage(wf,", "knobObject based on the position of the opti_values and variables in their array", "self_opt_execution\", knob_object) # create a new experiment to run in execution exp =", "key in knobs: variables.append(key) # values in knobs might come unordered, so sort", "applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\" info(\"> ExecStrategy", "optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables,", "user has specified in the knobs knobs = wf.execution_strategy[\"knobs\"] # we create a", "# create the knobObject based on the position of the opti_values and variables", "idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables):", "\"\"\" executes a self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method", "knobs might come unordered, so sort them to avoid dimension errors of scikit", "return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, 
opti_values): \"\"\" recreates knob values from a", "knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the", "info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) # we look at the ranges", "uses the return value (it tries to minimize it) to select new knobs", "Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) # finished", "opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function we", "the return value (it tries to minimize it) to select new knobs to", "errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl =", "their array for idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def", "+ str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values):", "= wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) #", "the position of the opti_values and variables in their array for idx, val", "to minimize it) to select new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples)", "gp_minimize from oeda.log import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs", "in knobs: variables.append(key) # values in knobs might come unordered, so sort them", "come unordered, so sort them to avoid dimension errors of scikit min_value =", "from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import 
applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def", "(it tries to minimize it) to select new knobs to test print(\"variables\", variables)", "= wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" +", "[] range_tuples = [] # we fill the arrays and use the index", "scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value])", "min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we", "# it uses the return value (it tries to minimize it) to select", "minimize it) to select new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result", "start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\",", "return value (it tries to minimize it) to select new knobs to test", "n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results info(\">\") info(\"> OptimalResult |", "wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method,", "recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from a variable \"\"\" knob_object = {}", "this is the function we call and that returns a value for optimization", "variable names and a list of knob (from,to) variables = [] range_tuples =", "list of variable names and a list of knob 
(from,to) variables = []", "acq_func=acquisition_method) # optimizer is done, print results info(\">\") info(\"> OptimalResult | Knobs: \"", "from oeda.log import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from", "SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer", "= min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) #", "value (it tries to minimize it) to select new knobs to test print(\"variables\",", "colorama import Fore from skopt import gp_minimize from oeda.log import * from oeda.rtxlib.execution", "create the knobObject based on the position of the opti_values and variables in", "return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function we call", "recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from a variable", "oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\"", "so sort them to avoid dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1]))", "recreates knob values from a variable \"\"\" knob_object = {} # create the", "a self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"]", "look at the ranges the user has specified in the knobs knobs =", "new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) 
optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf,", "info(\"> | Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x),", "the opti_values and variables in their array for idx, val in enumerate(variables): knob_object[val]", "Fore from skopt import gp_minimize from oeda.log import * from oeda.rtxlib.execution import experimentFunction", "in knobs might come unordered, so sort them to avoid dimension errors of", "of the opti_values and variables in their array for idx, val in enumerate(variables):", "opti_values and variables in their array for idx, val in enumerate(variables): knob_object[val] =", "test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples,", "index to map from gauss-optimizer-value to variable for key in knobs: variables.append(key) #", "optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) # we look", "it uses the return value (it tries to minimize it) to select new", "import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\" info(\">", "def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from a variable \"\"\" knob_object =", "position of the opti_values and variables in their array for idx, val in", "function we call and that returns a value for optimization \"\"\" knob_object =", "knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a new experiment", "opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, 
n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print", "= dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object wf.setup_stage(wf, exp[\"knobs\"])", "str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return", "execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object", "knobs knobs = wf.execution_strategy[\"knobs\"] # we create a list of variable names and", "self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results", "# we look at the ranges the user has specified in the knobs", "# we create a list of variable names and a list of knob", "range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) #", "to execute # it uses the return value (it tries to minimize it)", "def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy \"\"\" info(\"> ExecStrategy |", "from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes", "print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, 
n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method)", "in the knobs knobs = wf.execution_strategy[\"knobs\"] # we create a list of variable", "\" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun)) # finished info(\">\")", "str(optimizer_result.fun)) # finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\"", "finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob", "def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function we call and that", "# finished info(\">\") applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates", "the minimization function a callback to execute # it uses the return value", "dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object wf.setup_stage(wf, exp[\"knobs\"]) return", "function a callback to execute # it uses the return value (it tries", "= wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object wf.setup_stage(wf, exp[\"knobs\"]) return experimentFunction(wf, exp)", "for key in knobs: variables.append(key) # values in knobs might come unordered, so", "executes a self optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method =", "OptimalResult | Knobs: \" + 
str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \" + str(optimizer_result.fun))", "the function we call and that returns a value for optimization \"\"\" knob_object", "opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a new experiment to run in", "print(\"knob_object in self_opt_execution\", knob_object) # create a new experiment to run in execution", "specified in the knobs knobs = wf.execution_strategy[\"knobs\"] # we create a list of", "[] # we fill the arrays and use the index to map from", "n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results info(\">\") info(\"> OptimalResult | Knobs:", "optimizer is done, print results info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables,", "opti_values, variables): \"\"\" this is the function we call and that returns a", "tries to minimize it) to select new knobs to test print(\"variables\", variables) print(\"range_tuples\",", "in self_opt_execution\", knob_object) # create a new experiment to run in execution exp", "# create a new experiment to run in execution exp = dict() exp[\"ignore_first_n_samples\"]", "# we fill the arrays and use the index to map from gauss-optimizer-value", "tuple([min_value, max_value]) range_tuples.append(tpl) # we give the minimization function a callback to execute", "print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments,", "acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \"", "we look at the ranges the user has specified in the knobs knobs", "based on the 
position of the opti_values and variables in their array for", "\"\"\" this is the function we call and that returns a value for", "the ranges the user has specified in the knobs knobs = wf.execution_strategy[\"knobs\"] #", "= recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object) # create a new experiment to", "a list of variable names and a list of knob (from,to) variables =", "minimization function a callback to execute # it uses the return value (it", "import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self", "we call and that returns a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables,", "variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design,", "sort them to avoid dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value", "experiment to run in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] =", "= tuple([min_value, max_value]) range_tuples.append(tpl) # we give the minimization function a callback to", "unordered, so sort them to avoid dimension errors of scikit min_value = min(float(knobs[key][0]),", "to avoid dimension errors of scikit min_value = min(float(knobs[key][0]), float(knobs[key][1])) max_value = max(float(knobs[key][0]),", "float(knobs[key][1])) max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give", "optimizing strategy \"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] 
wf.totalExperiments =", "range_tuples = [] # we fill the arrays and use the index to", "to variable for key in knobs: variables.append(key) # values in knobs might come", "and that returns a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object", "use the index to map from gauss-optimizer-value to variable for key in knobs:", "(from,to) variables = [] range_tuples = [] # we fill the arrays and", "import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import", "\"\"\" recreates knob values from a variable \"\"\" knob_object = {} # create", "names and a list of knob (from,to) variables = [] range_tuples = []", "acquisition_method, Fore.CYAN) # we look at the ranges the user has specified in", "= max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give the minimization", "from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing strategy", "call and that returns a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values)", "run in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"]", "the index to map from gauss-optimizer-value to variable for key in knobs: variables.append(key)", "the knobs knobs = wf.execution_strategy[\"knobs\"] # we create a list of variable names", "exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"] exp[\"knobs\"] = knob_object wf.setup_stage(wf, exp[\"knobs\"]) return experimentFunction(wf,", "create a list of variable names and a list of knob (from,to) 
variables", "max_value]) range_tuples.append(tpl) # we give the minimization function a callback to execute #", "\"\"\" knob_object = {} # create the knobObject based on the position of", "the knobObject based on the position of the opti_values and variables in their", "give the minimization function a callback to execute # it uses the return", "a callback to execute # it uses the return value (it tries to", "optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from a variable \"\"\"", "| SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\">", "variable \"\"\" knob_object = {} # create the knobObject based on the position", "to map from gauss-optimizer-value to variable for key in knobs: variables.append(key) # values", "it) to select new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result =", "import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf)", "self_optimizer_execution(wf, opti_values, variables): \"\"\" this is the function we call and that returns", "variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is done, print results info(\">\") info(\">", "= {} # create the knobObject based on the position of the opti_values", "variables.append(key) # values in knobs might come unordered, so sort them to avoid", "wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" 
+ acquisition_method, Fore.CYAN)", "we create a list of variable names and a list of knob (from,to)", "values in knobs might come unordered, so sort them to avoid dimension errors", "optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from a variable \"\"\" knob_object", "\"\"\" info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design", "= wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) # we look at", "a value for optimization \"\"\" knob_object = recreate_knob_from_optimizer_values(variables, opti_values) print(\"knob_object in self_opt_execution\", knob_object)", "of variable names and a list of knob (from,to) variables = [] range_tuples", "float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give the minimization function a", "= gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer is", "range_tuples.append(tpl) # we give the minimization function a callback to execute # it", "on the position of the opti_values and variables in their array for idx,", "in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this", "opti_values): \"\"\" recreates knob values from a variable \"\"\" knob_object = {} #", "in their array for idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object", "experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): 
applyInitKnobs(wf) \"\"\"", "create a new experiment to run in execution exp = dict() exp[\"ignore_first_n_samples\"] =", "is done, print results info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x)))", "Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer |", "# optimizer is done, print results info(\">\") info(\"> OptimalResult | Knobs: \" +", "oeda.log import * from oeda.rtxlib.execution import experimentFunction from oeda.rtxlib.executionstrategy import applyInitKnobs from oeda.rtxlib.executionstrategy", "list of knob (from,to) variables = [] range_tuples = [] # we fill", "applyInitKnobs from oeda.rtxlib.executionstrategy import applyDefaultKnobs def start_self_optimizer_strategy(wf): applyInitKnobs(wf) \"\"\" executes a self optimizing", "info(\"> ExecStrategy | SelfOptimizer\", Fore.CYAN) acquisition_method = wf.execution_strategy[\"acquisition_method\"] wf.totalExperiments = wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design =", "for idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values,", "arrays and use the index to map from gauss-optimizer-value to variable for key", "the user has specified in the knobs knobs = wf.execution_strategy[\"knobs\"] # we create", "to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables),", "\" + acquisition_method, Fore.CYAN) # we look at the ranges the user has", "enumerate(variables): knob_object[val] = opti_values[idx] return knob_object def self_optimizer_execution(wf, opti_values, variables): \"\"\" this is", "values from a 
variable \"\"\" knob_object = {} # create the knobObject based", "to run in execution exp = dict() exp[\"ignore_first_n_samples\"] = wf.primary_data_provider[\"ignore_first_n_samples\"] exp[\"sample_size\"] = wf.execution_strategy[\"sample_size\"]", "knobs: variables.append(key) # values in knobs might come unordered, so sort them to", "at the ranges the user has specified in the knobs knobs = wf.execution_strategy[\"knobs\"]", "max_value = max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give the", "to select new knobs to test print(\"variables\", variables) print(\"range_tuples\", range_tuples) optimizer_result = gp_minimize(lambda", "applyDefaultKnobs(wf) return recreate_knob_from_optimizer_values(variables, optimizer_result.x), optimizer_result.fun def recreate_knob_from_optimizer_values(variables, opti_values): \"\"\" recreates knob values from", "<filename>Backend/oeda/rtxlib/executionstrategy/SelfOptimizerStrategy.py from colorama import Fore from skopt import gp_minimize from oeda.log import *", "variables in their array for idx, val in enumerate(variables): knob_object[val] = opti_values[idx] return", "and use the index to map from gauss-optimizer-value to variable for key in", "info(\">\") info(\"> OptimalResult | Knobs: \" + str(recreate_knob_from_optimizer_values(variables, optimizer_result.x))) info(\"> | Result: \"", "map from gauss-optimizer-value to variable for key in knobs: variables.append(key) # values in", "wf.execution_strategy[\"optimizer_iterations\"] optimizer_iterations_in_design = wf.execution_strategy[\"optimizer_iterations_in_design\"] info(\"> Optimizer | \" + acquisition_method, Fore.CYAN) # we", "Fore.CYAN) # we look at the ranges the user has specified in the", "might come unordered, so sort them to avoid dimension errors of scikit min_value", "max(float(knobs[key][0]), float(knobs[key][1])) tpl = tuple([min_value, max_value]) range_tuples.append(tpl) # we give the 
minimization function", "optimizer_result = gp_minimize(lambda opti_values: self_optimizer_execution(wf, opti_values, variables), range_tuples, n_calls=wf.totalExperiments, n_random_starts=optimizer_iterations_in_design, acq_func=acquisition_method) # optimizer" ]
[ "1)) target_x = xvalues funcvalues = xvalues**2 target_y = funcvalues mu = xvalues", "target_y = funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1))", "0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx,", "and cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2)", "= torch.normal(0, 1, (1, 400, 1)) target_x = xvalues funcvalues = xvalues**2 target_y", "cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x,", "(cols[0] == 0 and cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0,", "* 28))[None, :] width = 28 height = 28 Plotter.paint_groundtruth_greyscale( func_x=func_x, width=width, height=height)", "as file: train_loss = [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file:", "white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and len(white_idx) ==", "xvalues funcvalues = xvalues**2 target_y = funcvalues mu = xvalues cov_matrix = torch.normal(0,", "mu = torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu,", "0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0,", "2) assert (rows[0] == 0 and rows[2] == 1) assert (cols[0] == 0", "Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1,", "0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx =", "test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols", "target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = 
torch.rand((28 * 28))[None, :] width =", "torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_groundtruth_greyscale( func_x=func_x, width=width,", "28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width", "cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] ==", "1) assert (cols[0] == 0 and cols[2] == 0) def test_get_colour_based_idx(): img =", "1]) cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0]", "line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line in", "= torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1", "target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :] width", "1) assert (black_idx[0] == 0 and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt')", "0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def", "cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows", "black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and len(white_idx) == 1)", "assert (black_idx[0] == 0 and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as", "cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :] width = 28 height", "Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows, cols", "== 1) assert (black_idx[0] == 0 and len(black_idx) == 1) def 
test_plot_training_progress(): with", "28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 *", "== 1) assert (cols[0] == 0 and cols[2] == 0) def test_get_colour_based_idx(): img", "1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line in", "= xvalues funcvalues = xvalues**2 target_y = funcvalues mu = xvalues cov_matrix =", "torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1)) target_x = xvalues funcvalues =", "train_loss = [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss =", "height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None,", "== 0 and rows[2] == 1) assert (cols[0] == 0 and cols[2] ==", "0 and cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2,", "in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line in file.readlines()]", "400, 1)) target_x = xvalues funcvalues = xvalues**2 target_y = funcvalues mu =", "torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert", "for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10]", ":] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x", "and len(white_idx) == 1) assert (black_idx[0] == 0 and len(black_idx) == 1) def", "= torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img)", "contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, 
target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 *", "func_x = torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_groundtruth_greyscale(", "torch.normal(0, 1, (1, 400, 1)) target_x = xvalues funcvalues = xvalues**2 target_y =", "= torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] ==", "= get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and len(white_idx) == 1) assert", "def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line in file.readlines()]", "== 0 and cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1,", "len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for", "test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line in file.readlines()] with", "400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu", "file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress(", "as file: vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000)", "torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix)", "file: train_loss = [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss", "1 and len(white_idx) == 1) assert (black_idx[0] == 0 and len(black_idx) == 1)", 
"torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height)", "== 1 and len(white_idx) == 1) assert (black_idx[0] == 0 and len(black_idx) ==", "with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt')", "len(white_idx) == 1) assert (black_idx[0] == 0 and len(black_idx) == 1) def test_plot_training_progress():", "funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :]", "def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1)) target_x", "(white_idx[0] == 1 and len(white_idx) == 1) assert (black_idx[0] == 0 and len(black_idx)", "0 and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss =", "with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss,", "def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :] width = 28 height =", "== 0 and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss", "1, (1, 400, 1)) target_x = xvalues funcvalues = xvalues**2 target_y = funcvalues", "def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width = 28 height =", "3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2] ==", "cols, img) assert (white_idx[0] == 1 and len(white_idx) == 1) assert (black_idx[0] ==", "cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2] == 1) assert", "xvalues = torch.normal(0, 1, (1, 400, 1)) target_x = xvalues funcvalues = xvalues**2", "= xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) 
Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues,", "get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows,", "img) assert (white_idx[0] == 1 and len(white_idx) == 1) assert (black_idx[0] == 0", "== 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line", "* 28))[None, :] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def", "1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu =", "width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x =", "2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2]", "1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and len(white_idx)", "xvalues**2 target_y = funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001, (1, 400,", "and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line)", "28))[None, :] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale():", "height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width = 28 height", "cnp.plotting import get_contxt_coordinates, get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1,", "file: vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def", "for line in file.readlines()] with 
open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line", "target_x = xvalues funcvalues = xvalues**2 target_y = funcvalues mu = xvalues cov_matrix", "mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :] width = 28", "width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width = 28", "test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None, :] width = 28 height = 28", "0 and rows[2] == 1) assert (cols[0] == 0 and cols[2] == 0)", "mu = xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues,", "2) rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows,", "get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and len(white_idx) == 1) assert (black_idx[0]", "def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1])", "= [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line)", "= torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols = torch.tensor([0,", "contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1)) target_x = xvalues", "= torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_groundtruth_greyscale( func_x=func_x,", "= get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2] == 1) assert (cols[0]", "import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows, cols =", "= 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28", "get_contxt_coordinates, get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3])", "1, 0.2]).reshape(2, 2) 
rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx", "rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2] == 1)", "rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols,", "assert (white_idx[0] == 1 and len(white_idx) == 1) assert (black_idx[0] == 0 and", "1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and", "torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt,", "torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0] == 0", "(rows[0] == 0 and rows[2] == 1) assert (cols[0] == 0 and cols[2]", "test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert", "(1, 400, 1)) target_x = xvalues funcvalues = xvalues**2 target_y = funcvalues mu", "get_contxt_coordinates(contxt, 2) assert (rows[0] == 0 and rows[2] == 1) assert (cols[0] ==", "= funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d(", "file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0,", "test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1)) target_x =", "and rows[2] == 1) assert (cols[0] == 0 and cols[2] == 0) def", "test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width = 28 height = 28", "= torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1)) target_x = xvalues funcvalues", "rows[2] == 1) assert (cols[0] == 0 and cols[2] == 0) def test_get_colour_based_idx():", "[float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for", "interval=1000) 
def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400, 1))", "(black_idx[0] == 0 and len(black_idx) == 1) def test_plot_training_progress(): with open('tests/fixtures/train_loss.txt') as file:", "xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28 * 28))[None,", "in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues =", "torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols = torch.tensor([0, 1])", "<gh_stars>0 from cnp.plotting import get_contxt_coordinates, get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt =", "contxt = torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2) assert (rows[0]", "torch.tensor([0, 1]) white_idx, black_idx = get_colour_based_idx(rows, cols, img) assert (white_idx[0] == 1 and", "xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y,", "vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d():", "= [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx", "import get_contxt_coordinates, get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2,", "img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows = torch.tensor([0, 1]) cols =", "= torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, 
xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu,", "assert (rows[0] == 0 and rows[2] == 1) assert (cols[0] == 0 and", "def test_get_contxt_coordinates(): contxt = torch.tensor([0, 1, 2, 3]) rows, cols = get_contxt_coordinates(contxt, 2)", "line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues", "== 0) def test_get_colour_based_idx(): img = torch.tensor([0, 0, 1, 0.2]).reshape(2, 2) rows =", "from cnp.plotting import get_contxt_coordinates, get_colour_based_idx, Plotter import torch def test_get_contxt_coordinates(): contxt = torch.tensor([0,", "vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1, 400,", "open('tests/fixtures/vali_loss.txt') as file: vali_loss = [float(line) for line in file.readlines()] Plotter.plot_training_progress( training_losses=train_loss, vali_losses=vali_loss,", "= torch.rand((28 * 28))[None, :] width = 28 height = 28 Plotter.paint_prediction_greyscale(mu=mu, width=width,", "= xvalues**2 target_y = funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001, (1,", "Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale(): mu = torch.rand((28", "funcvalues = xvalues**2 target_y = funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001,", "= 28 Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :]", "open('tests/fixtures/train_loss.txt') as file: train_loss = [float(line) for line in file.readlines()] with open('tests/fixtures/vali_loss.txt') as", "[float(line) for line in file.readlines()] Plotter.plot_training_progress( 
training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx =", "training_losses=train_loss, vali_losses=vali_loss, interval=1000) def test_plot_context_target_1d(): contxt_idx = torch.randperm(400)[:10] xvalues = torch.normal(0, 1, (1,", "Plotter.paint_prediction_greyscale(mu=mu, width=width, height=height) def test_paint_groundtruth_greyscale(): func_x = torch.rand((28 * 28))[None, :] width =", "(1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx, xvalues=xvalues, funcvalues=funcvalues, target_y=target_y, target_x=target_x, mu=mu, cov_matrix=cov_matrix) def test_paint_prediction_greyscale():", "assert (cols[0] == 0 and cols[2] == 0) def test_get_colour_based_idx(): img = torch.tensor([0,", "funcvalues mu = xvalues cov_matrix = torch.normal(0, 0.001, (1, 400, 1)) Plotter.plot_context_target_1d( contxt_idx=contxt_idx," ]
[ "from django.conf.urls import url, include from rest_framework import routers from crm_inbox.flows import *", "crm_inbox.flows import * # noqa from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process',", "django.conf.urls import url, include from rest_framework import routers from crm_inbox.flows import * #", "processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/', include('processlib.urls',", "import routers from crm_inbox.flows import * # noqa from processlib.views import (ProcessViewSet) router", "# noqa from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns =", "noqa from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [", "import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/', include('processlib.urls', namespace='processlib')),", "(ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/', include('processlib.urls', namespace='processlib')), url(r'^api/',", "from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/',", "from rest_framework import routers from crm_inbox.flows import * # noqa from processlib.views import", "include from rest_framework import routers from crm_inbox.flows import * # noqa from processlib.views", "= routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/', include('processlib.urls', namespace='processlib')), url(r'^api/', include(router.urls)), ]", "from crm_inbox.flows import * # noqa from processlib.views import (ProcessViewSet) router = 
routers.DefaultRouter()", "import * # noqa from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet)", "url, include from rest_framework import routers from crm_inbox.flows import * # noqa from", "router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns = [ url(r'^process/', include('processlib.urls', namespace='processlib')), url(r'^api/', include(router.urls)),", "rest_framework import routers from crm_inbox.flows import * # noqa from processlib.views import (ProcessViewSet)", "* # noqa from processlib.views import (ProcessViewSet) router = routers.DefaultRouter() router.register('process', ProcessViewSet) urlpatterns", "routers from crm_inbox.flows import * # noqa from processlib.views import (ProcessViewSet) router =", "import url, include from rest_framework import routers from crm_inbox.flows import * # noqa" ]
[ "import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations", "] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)),", "import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations = [", "Generated by Django 2.0.5 on 2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks", "'0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph',", "[ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title',", "[ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('image', wagtail.images.blocks.ImageChooserBlock(required=False)))), ),", "on 2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks", "Django 2.0.5 on 2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks import wagtail.core.fields", "django.db import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies =", "wagtail.core.blocks import wagtail.core.fields import 
wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ]", "= [ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full", "dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading',", "# Generated by Django 2.0.5 on 2018-05-17 04:40 from django.db import migrations import", "04:40 from django.db import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration):", "<reponame>marcanuy/keraban # Generated by Django 2.0.5 on 2018-05-17 04:40 from django.db import migrations", "operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('image',", "2.0.5 on 2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks import wagtail.core.fields import", "from django.db import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies", "import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [", "class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage',", "2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class", "import wagtail.core.blocks import wagtail.core.fields 
import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'),", "migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('image', wagtail.images.blocks.ImageChooserBlock(required=False)))), ), ]", "('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)),", "Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField( model_name='standardpage', name='body',", "migrations import wagtail.core.blocks import wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home',", "= [ migrations.AlterField( model_name='standardpage', name='body', field=wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title', required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('image', wagtail.images.blocks.ImageChooserBlock(required=False)))),", "wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations = [ migrations.AlterField(", "wagtail.core.fields import wagtail.images.blocks class Migration(migrations.Migration): dependencies = [ ('home', '0003_standardpage_subtitle'), ] operations =", "by Django 2.0.5 on 2018-05-17 04:40 from django.db import migrations import wagtail.core.blocks import" ]
[ "python3 # -*- coding: utf-8 -*- if __name__ == \"__main__\": a = {0,", "{0, 1, 2, 3} print(len(a)) a.add(4) print(a) a = {0, 1, 2, 3}", "a = {0, 1, 2, 3} a.remove(3) print(a) a = {0, 1, 2,", "print(a) a = {0, 1, 2, 3} a.remove(3) print(a) a = {0, 1,", "utf-8 -*- if __name__ == \"__main__\": a = {0, 1, 2, 3} print(len(a))", "= {0, 1, 2, 3} a.remove(3) print(a) a = {0, 1, 2, 3}", "= {0, 1, 2, 3} print(len(a)) a.add(4) print(a) a = {0, 1, 2,", "# -*- coding: utf-8 -*- if __name__ == \"__main__\": a = {0, 1,", "a.add(4) print(a) a = {0, 1, 2, 3} a.remove(3) print(a) a = {0,", "\"__main__\": a = {0, 1, 2, 3} print(len(a)) a.add(4) print(a) a = {0,", "if __name__ == \"__main__\": a = {0, 1, 2, 3} print(len(a)) a.add(4) print(a)", "1, 2, 3} a.remove(3) print(a) a = {0, 1, 2, 3} a.clear() print(a)", "-*- coding: utf-8 -*- if __name__ == \"__main__\": a = {0, 1, 2,", "a = {0, 1, 2, 3} print(len(a)) a.add(4) print(a) a = {0, 1,", "{0, 1, 2, 3} a.remove(3) print(a) a = {0, 1, 2, 3} a.clear()", "__name__ == \"__main__\": a = {0, 1, 2, 3} print(len(a)) a.add(4) print(a) a", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- if __name__ == \"__main__\": a =", "-*- if __name__ == \"__main__\": a = {0, 1, 2, 3} print(len(a)) a.add(4)", "coding: utf-8 -*- if __name__ == \"__main__\": a = {0, 1, 2, 3}", "2, 3} print(len(a)) a.add(4) print(a) a = {0, 1, 2, 3} a.remove(3) print(a)", "print(len(a)) a.add(4) print(a) a = {0, 1, 2, 3} a.remove(3) print(a) a =", "3} print(len(a)) a.add(4) print(a) a = {0, 1, 2, 3} a.remove(3) print(a) a", "1, 2, 3} print(len(a)) a.add(4) print(a) a = {0, 1, 2, 3} a.remove(3)", "== \"__main__\": a = {0, 1, 2, 3} print(len(a)) a.add(4) print(a) a =" ]
[ "create_app() # runs this only when the environment is 'development' if settings.ENVIRONMENT ==", "create_app application = create_app() # runs this only when the environment is 'development'", "from dynaconf import settings from app import create_app application = create_app() # runs", "application = create_app() # runs this only when the environment is 'development' if", "dynaconf import settings from app import create_app application = create_app() # runs this", "environment is 'development' if settings.ENVIRONMENT == \"development\" and settings.GUNICORN is False: application.run(host=\"0.0.0.0\", port=settings.FLASK_CONFIG.PORT,", "runs this only when the environment is 'development' if settings.ENVIRONMENT == \"development\" and", "= create_app() # runs this only when the environment is 'development' if settings.ENVIRONMENT", "import settings from app import create_app application = create_app() # runs this only", "the environment is 'development' if settings.ENVIRONMENT == \"development\" and settings.GUNICORN is False: application.run(host=\"0.0.0.0\",", "this only when the environment is 'development' if settings.ENVIRONMENT == \"development\" and settings.GUNICORN", "from app import create_app application = create_app() # runs this only when the", "is 'development' if settings.ENVIRONMENT == \"development\" and settings.GUNICORN is False: application.run(host=\"0.0.0.0\", port=settings.FLASK_CONFIG.PORT, debug=True)", "import create_app application = create_app() # runs this only when the environment is", "settings from app import create_app application = create_app() # runs this only when", "only when the environment is 'development' if settings.ENVIRONMENT == \"development\" and settings.GUNICORN is", "# runs this only when the environment is 'development' if settings.ENVIRONMENT == \"development\"", "app import create_app application = create_app() # runs this only when the environment", "when the environment is 'development' if settings.ENVIRONMENT 
== \"development\" and settings.GUNICORN is False:" ]
[ "exists(self, path: str): pass @abstractmethod def put(self, path: str, content: str, overwrite: bool", "makedirs(self, path: str): pass @abstractmethod def copy(self, source: str, destination: str, recursive: bool", "put(self, path: str, content: str, overwrite: bool = False): pass @abstractmethod def makedirs(self,", "str): pass @abstractmethod def put(self, path: str, content: str, overwrite: bool = False):", "overwrite: bool = False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def", "False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self, source: str,", "recursive: bool = False): pass @abstractmethod def delete(self, path: str, recursive: bool =", "def exists(self, path: str): pass @abstractmethod def put(self, path: str, content: str, overwrite:", "abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def put(self, path:", "FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def put(self, path: str, content:", "import ABC, abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def", "destination: str, recursive: bool = False): pass @abstractmethod def delete(self, path: str, recursive:", "abc import ABC, abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod", "ABC, abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def put(self,", "source: str, destination: str, recursive: bool = False): pass @abstractmethod def move(self, source:", "def copy(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def", "bool = False): pass @abstractmethod def move(self, source: str, destination: str, recursive: bool", "path: str): pass @abstractmethod def copy(self, source: str, destination: str, 
recursive: bool =", "pass @abstractmethod def move(self, source: str, destination: str, recursive: bool = False): pass", "recursive: bool = False): pass @abstractmethod def move(self, source: str, destination: str, recursive:", "str, content: str, overwrite: bool = False): pass @abstractmethod def makedirs(self, path: str):", "@abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self, source: str, destination: str,", "str, recursive: bool = False): pass @abstractmethod def move(self, source: str, destination: str,", "str, destination: str, recursive: bool = False): pass @abstractmethod def move(self, source: str,", "move(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def delete(self,", "content: str, overwrite: bool = False): pass @abstractmethod def makedirs(self, path: str): pass", "bool = False): pass @abstractmethod def delete(self, path: str, recursive: bool = False):", "class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def put(self, path: str,", "str, overwrite: bool = False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod", "str, recursive: bool = False): pass @abstractmethod def delete(self, path: str, recursive: bool", "= False): pass @abstractmethod def delete(self, path: str, recursive: bool = False): pass", "= False): pass @abstractmethod def move(self, source: str, destination: str, recursive: bool =", "@abstractmethod def copy(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod", "source: str, destination: str, recursive: bool = False): pass @abstractmethod def delete(self, path:", "destination: str, recursive: bool = False): pass @abstractmethod def move(self, source: str, destination:", "@abstractmethod def put(self, path: str, content: str, overwrite: bool = False): pass @abstractmethod", "str, destination: str, recursive: bool = False): pass @abstractmethod def delete(self, path: 
str,", "@abstractmethod def exists(self, path: str): pass @abstractmethod def put(self, path: str, content: str,", "pass @abstractmethod def put(self, path: str, content: str, overwrite: bool = False): pass", "= False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self, source:", "path: str): pass @abstractmethod def put(self, path: str, content: str, overwrite: bool =", "str): pass @abstractmethod def copy(self, source: str, destination: str, recursive: bool = False):", "def put(self, path: str, content: str, overwrite: bool = False): pass @abstractmethod def", "path: str, content: str, overwrite: bool = False): pass @abstractmethod def makedirs(self, path:", "pass @abstractmethod def copy(self, source: str, destination: str, recursive: bool = False): pass", "copy(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def move(self,", "def move(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def", "bool = False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self,", "def makedirs(self, path: str): pass @abstractmethod def copy(self, source: str, destination: str, recursive:", "from abc import ABC, abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass", "@abstractmethod def move(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod", "pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self, source: str, destination:", "False): pass @abstractmethod def move(self, source: str, destination: str, recursive: bool = False):" ]
[ "e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to set training mode (\"True\") or", "backbone output of ResNet # to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim)", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "# Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m,", "layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates", "affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to set training mode (\"True\")", "resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils", "module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block,", "): \"\"\"ResNet-50 model from \"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\",", "Make first convolution use a 3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3", "This is the empirical approximation for initialization the vMF # distributions for each", "1 if stride != 1 or self.inplanes != planes * block.expansion: downsample =", "on certain modules. See documentations of particular modules for details of their behaviors", "vMF # distributions for each class in the final layer. 
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim)", "mode during training self.set_bn_eval = set_bn_eval # Make first convolution use a 3x3", "block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ):", "embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model", "of particular modules for details of their behaviors in training/evaluation mode, if they", "bias=False) if self.use_vmf: # This is the empirical approximation for initialization the vMF", "!= 3: raise ValueError( \"replace_stride_with_dilation should be None \" \"or a 3-element tuple,", "stride with a dilated convolution instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation)", "if dilate: self.dilation *= stride stride = 1 if stride != 1 or", "this file except in compliance with the License. # You may obtain a", "if self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def", "self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self,", "if self.use_vmf: # This is the empirical approximation for initialization the vMF #", "= nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu =", "n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None,", "only on certain modules. See documentations of particular modules for details of their", "layer. 
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation is", "= nn.BatchNorm2d # Fixes batch-norm to eval mode during training self.set_bn_eval = set_bn_eval", "is the empirical approximation for initialization the vMF # distributions for each class", "# Copyright 2021 The vMF Embeddings Authors. # # Licensed under the Apache", "self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is the empirical approximation", "during training self.set_bn_eval = set_bn_eval # Make first convolution use a 3x3 kernel", "ANY KIND, either express or implied. # See the License for the specific", "that remaps from the backbone output of ResNet # to the embedding dimensionality", "to eval mode during training self.set_bn_eval = set_bn_eval # Make first convolution use", "# Each element in the tuple indicates if we should replace # the", "embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if", "3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7,", "= 512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval mode", "= nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\")", "block.expansion, stride), norm_layer(planes * block.expansion), ) layers = [] layers.append( block( self.inplanes, planes,", "stride), norm_layer(planes * block.expansion), ) layers = [] layers.append( block( self.inplanes, planes, stride,", "learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning for Image", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied. # See", "= self.dilation if dilate: self.dilation *= stride stride = 1 if stride !=", "governing permissions and # limitations under the License. \"\"\"Class for instantiating a ResNet", "bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 =", "progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet", "Returns: self \"\"\" self.training = mode for module in self.children(): module.train(mode) if self.set_bn_eval:", "model from \"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3,", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3,", "\" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group", "3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool", "self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2,", "resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer", "use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d #", "else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes)", "= mode for module in self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules():", "): \"\"\"Initializes a ResNet architecture object. 
See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes,", "OF ANY KIND, either express or implied. # See the License for the", "nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m", "= nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion), ) layers =", "block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation =", "progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet(", "# distributions for each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes", "the module in training mode. This has any effect only on certain modules.", "a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if self.first_conv_3x3:", "in the tuple indicates if we should replace # the 2x2 stride with", "False, False] if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be None \"", "self.inplanes != planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride),", "256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool", "layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m,", "self.set_bn_eval = set_bn_eval # Make first convolution use a 3x3 kernel for CIFAR", "ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval,", "(nn.BatchNorm2d, 
nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules(): if", "_resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp,", "len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be None \" \"or a 3-element", "1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes *", "\"\"\" import logging import torch.nn as nn from torchvision.models import resnet from torchvision.models.utils", "import logging import torch.nn as nn from torchvision.models import resnet from torchvision.models.utils import", "in training mode. This has any effect only on certain modules. See documentations", "learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim,", "layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool =", "[] layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes", "downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion for _", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "tuple indicates if we should replace # the 2x2 stride with a dilated", "nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 =", "mode=True): \"\"\"Sets the module in training mode. 
This has any effect only on", "for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d,", "block.expansion for _ in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation,", "datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps from the backbone output", "each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation", "layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn from", "object. See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features", "if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block,", "are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to set training mode", "def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4,", "use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet", "self.classifier, ) def train(self, mode=True): \"\"\"Sets the module in training mode. 
This has", "layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. See arguments in", "= first_conv_3x3 # Linear layer that remaps from the backbone output of ResNet", "ResNet in PyTorch. Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn", "for _ in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer,", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "Authors. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps from the backbone output of", "distributions for each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True):", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if", "mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to", "they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. 
Args: mode: whether to set training", "\"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp,", "utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def", "required by applicable law or agreed to in writing, software # distributed under", "self.embedding_dim) self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation is None: # Each", "See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features =", "applicable law or agreed to in writing, software # distributed under the License", "log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def __init__(", "to set training mode (\"True\") or evaluation mode (\"False\"). Returns: self \"\"\" self.training", "self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion for _ in range(1,", "specific language governing permissions and # limitations under the License. \"\"\"Class for instantiating", "set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model =", "set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep", "self.use_vmf: # This is the empirical approximation for initialization the vMF # distributions", "the vMF # distributions for each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence,", "import torch.nn as nn from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from", "(\"True\") or evaluation mode (\"False\"). 
Returns: self \"\"\" self.training = mode for module", "or agreed to in writing, software # distributed under the License is distributed", "n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d", "1)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m,", "= groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3,", "nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes,", "first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet(", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion", "kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to", "etc. Args: mode: whether to set training mode (\"True\") or evaluation mode (\"False\").", "= self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3],", "for initialization the vMF # distributions for each class in the final layer.", "self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1,", "Embeddings Authors. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 =", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "planes * block.expansion for _ in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups,", "Each element in the tuple indicates if we should replace # the 2x2", "whether to set training mode (\"True\") or evaluation mode (\"False\"). Returns: self \"\"\"", "writing, software # distributed under the License is distributed on an \"AS IS\"", "\"\"\"Initializes a ResNet architecture object. See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf,", "if stride != 1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential(", "self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval mode during training self.set_bn_eval =", "nn.BatchNorm2d # Fixes batch-norm to eval mode during training self.set_bn_eval = set_bn_eval #", "nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True)", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "norm_layer = self._norm_layer downsample = None previous_dilation = self.dilation if dilate: self.dilation *=", "in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 *", "License. # You may obtain a copy of the License at # #", "nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) #", "vMF Embeddings Authors. 
# # Licensed under the Apache License, Version 2.0 (the", "for module in self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules(): if isinstance(module,", "a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp,", "def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers,", "compliance with the License. # You may obtain a copy of the License", "in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block, layers,", "self.dilation if dilate: self.dilation *= stride stride = 1 if stride != 1", "layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 =", "model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf,", "learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes", "padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1", "for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress,", "2x2 stride with a dilated convolution instead replace_stride_with_dilation = [False, False, False] if", "pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a", "a ResNet in PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import", "use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch", "range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers)", "log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder()", "downsample = None previous_dilation = self.dilation if dilate: self.dilation *= stride stride =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "Fixes batch-norm to eval mode during training self.set_bn_eval = set_bn_eval # Make first", "norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool,", "nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif", "m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):", "self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the module in", "of ResNet # to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier", "The vMF Embeddings Authors. 
# # Licensed under the Apache License, Version 2.0", "[False, False, False] if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be None", "if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to set", "block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder", "convolution use a 3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear", "64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer(", "= 1 if stride != 1 or self.inplanes != planes * block.expansion: downsample", "stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2,", "for instantiating a ResNet in PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import", "resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks,", "= logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def __init__( self,", "set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes", "def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample = None", "self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 =", "not use this file except in compliance with the License. 
# You may", "layers = [] layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer,", "[3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp,", "= self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2],", "ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if", "self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256,", "self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, )", "block.expansion), ) layers = [] layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width,", "= self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m", "Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim,", "4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence,", "Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained,", "License, Version 2.0 (the \"License\"); # you may not use this file except", "= nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap,", "embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from", "modules. 
See documentations of particular modules for details of their behaviors in training/evaluation", "# Make first convolution use a 3x3 kernel for CIFAR datasets self.first_conv_3x3 =", "if replace_stride_with_dilation is None: # Each element in the tuple indicates if we", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "def _resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp,", "n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained:", "kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. See", "previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion for _ in range(1, blocks):", "_make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation", "and # limitations under the License. \"\"\"Class for instantiating a ResNet in PyTorch.", "elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer", "# you may not use this file except in compliance with the License.", "set_bn_eval # Make first convolution use a 3x3 kernel for CIFAR datasets self.first_conv_3x3", "= nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block,", "or self.inplanes != planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion,", "agreed to in writing, software # distributed under the License is distributed on", "architecture object. 
See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence)", "training/evaluation mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether", "stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)", "utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation is None:", "in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1", "model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp,", "(the \"License\"); # you may not use this file except in compliance with", "utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the module in training mode.", "self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the module in training mode. 
This", "the backbone output of ResNet # to the embedding dimensionality self.remap = nn.Linear(self.backbone_features,", "downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion), ) layers", "self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block, layers, pretrained,", "init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning for Image Recognition\".\"\"\"", "# Unless required by applicable law or agreed to in writing, software #", "padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2,", "by applicable law or agreed to in writing, software # distributed under the", "set training mode (\"True\") or evaluation mode (\"False\"). Returns: self \"\"\" self.training =", "stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1],", "use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning for", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "64 self.dilation = 1 if replace_stride_with_dilation is None: # Each element in the", "*= stride stride = 1 if stride != 1 or self.inplanes != planes", "nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3,", "use a 3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer", "Linear layer that remaps from the backbone output of ResNet # to the", "0) # Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif", "final layer. 
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation", "6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, )", "in PyTorch. Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as", "self def _resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf,", "pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False)", "self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0])", "adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn from torchvision.models", "self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: #", "should be None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups", "a ResNet architecture object. See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp,", "file except in compliance with the License. 
# You may obtain a copy", "= nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is the empirical approximation for", "planes * block.expansion, stride), norm_layer(planes * block.expansion), ) layers = [] layers.append( block(", "first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a", "nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1,", "0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample =", "a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp,", "Args: mode: whether to set training mode (\"True\") or evaluation mode (\"False\"). Returns:", "License for the specific language governing permissions and # limitations under the License.", "mode (\"True\") or evaluation mode (\"False\"). Returns: self \"\"\" self.training = mode for", "# Linear layer that remaps from the backbone output of ResNet # to", "This has any effect only on certain modules. See documentations of particular modules", "to in writing, software # distributed under the License is distributed on an", "block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2])", "the specific language governing permissions and # limitations under the License. \"\"\"Class for", "self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes *", "implied. 
# See the License for the specific language governing permissions and #", "for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps from the", "1 if replace_stride_with_dilation is None: # Each element in the tuple indicates if", "\"License\"); # you may not use this file except in compliance with the", "we should replace # the 2x2 stride with a dilated convolution instead replace_stride_with_dilation", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "coding=utf-8 # Copyright 2021 The vMF Embeddings Authors. # # Licensed under the", "# coding=utf-8 # Copyright 2021 The vMF Embeddings Authors. # # Licensed under", "a 3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that", "for each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64", "effect only on certain modules. See documentations of particular modules for details of", "128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4", "the tuple indicates if we should replace # the 2x2 stride with a", "groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. 
See arguments in arch.py.\"\"\"", "self._norm_layer downsample = None previous_dilation = self.dilation if dilate: self.dilation *= stride stride", "self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d(", "nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck):", "if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block, layers, pretrained, progress,", "from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a", "previous_dilation = self.dilation if dilate: self.dilation *= stride stride = 1 if stride", "initialization the vMF # distributions for each class in the final layer. utils.vmf_class_weight_init(self.classifier.weight,", "or implied. # See the License for the specific language governing permissions and", "\"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6,", "import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\")", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2,", "logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def __init__( self, n_classes,", "from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log", "3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1", "ResNet architecture object. See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp,", "architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict =", "in writing, software # distributed under the License is distributed on an \"AS", "module in self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d):", "class in the final layer. 
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation =", "init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict", "instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation", "learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\")", "CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps from the backbone", "from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model", "in training/evaluation mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args: mode:", "raise ValueError( \"replace_stride_with_dilation should be None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation))", "None: # Each element in the tuple indicates if we should replace #", "nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0)", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict,", "modules for details of their behaviors in training/evaluation mode, if they are affected,", "= planes * block.expansion for _ in range(1, blocks): layers.append( block( self.inplanes, planes,", "self.inplanes = planes * block.expansion for _ in range(1, blocks): layers.append( block( self.inplanes,", "model def resnet50( 
n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False,", "learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture", "if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias,", "mode. This has any effect only on certain modules. See documentations of particular", "self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation is None: # Each element", "vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class", "dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation = self.dilation if dilate: self.dilation", "= [] layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, ))", "License. \"\"\"Class for instantiating a ResNet in PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py", "as nn from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import", "init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object.", "self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets", "import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import", "= ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, )", "indicates if we should replace # the 2x2 stride with a dilated convolution", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "# the 2x2 stride with a dilated convolution instead replace_stride_with_dilation = [False, False,", "you may not use this file except in compliance with the License. #", "dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1))", "3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps", "or evaluation mode (\"False\"). Returns: self \"\"\" self.training = mode for module in", "load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval,", "the License. \"\"\"Class for instantiating a ResNet in PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py", "kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 # Linear layer that remaps from", "dilate: self.dilation *= stride stride = 1 if stride != 1 or self.inplanes", "layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes =", "= [False, False, False] if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be", "= 64 self.dilation = 1 if replace_stride_with_dilation is None: # Each element in", "dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight,", "planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation = self.dilation", "planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes *", "under the License. \"\"\"Class for instantiating a ResNet in PyTorch. Code adapted from:", ":class:`Dropout`, :class:`BatchNorm`, etc. Args: mode: whether to set training mode (\"True\") or evaluation", "certain modules. See documentations of particular modules for details of their behaviors in", "width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. 
See arguments in arch.py.\"\"\" super(ResNet,", "self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer(", "block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes", "resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained,", "training self.set_bn_eval = set_bn_eval # Make first convolution use a 3x3 kernel for", "* block.expansion, stride), norm_layer(planes * block.expansion), ) layers = [] layers.append( block( self.inplanes,", "n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\"", "use_vmf, learn_temp, init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes,", "the 2x2 stride with a dilated convolution instead replace_stride_with_dilation = [False, False, False]", "self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0])", "use this file except in compliance with the License. # You may obtain", "if we should replace # the 2x2 stride with a dilated convolution instead", "\"\"\"Sets the module in training mode. 
This has any effect only on certain", "self \"\"\" self.training = mode for module in self.children(): module.train(mode) if self.set_bn_eval: for", "bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512", "module.eval() return self def _resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval,", "nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion), ) layers = []", "resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp,", "resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50", "self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for", "_resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3,", "* block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion),", "2.0 (the \"License\"); # you may not use this file except in compliance", "self.dilation *= stride stride = 1 if stride != 1 or self.inplanes !=", "# to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim,", "= set_bn_eval # Make first convolution use a 3x3 kernel for CIFAR datasets", "False] if 
len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be None \" \"or", "ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence,", "* block.expansion for _ in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width,", "torch.nn as nn from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "= nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This", "Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def", "\"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if", "isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block, layers, pretrained, progress, n_classes,", "ValueError( \"replace_stride_with_dilation should be None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups", "# # Unless required by applicable law or agreed to in writing, software", "* block.expansion), ) layers = [] layers.append( block( self.inplanes, planes, stride, downsample, self.groups,", "express or implied. # See the License for the specific language governing permissions", "with a dilated convolution instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) !=", "None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width =", "PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn", "to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes,", "self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is the", "https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn from torchvision.models import resnet from", "super(ResNet, self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer", "\"replace_stride_with_dilation should be None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups =", "kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3,", "base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1,", "either express or implied. # See the License for the specific language governing", "documentations of particular modules for details of their behaviors in training/evaluation mode, if", "first_conv_3x3 # Linear layer that remaps from the backbone output of ResNet #", "the final layer. 
utils.vmf_class_weight_init(self.classifier.weight, self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1 if", "padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)", "for defining a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf,", "\"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf,", "the empirical approximation for initialization the vMF # distributions for each class in", "particular modules for details of their behaviors in training/evaluation mode, if they are", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "1) nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight,", "): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf,", "isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes,", "norm_layer, )) self.inplanes = planes * block.expansion for _ in range(1, blocks): layers.append(", "for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight,", "self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2,", "https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn from 
torchvision.models import resnet", "self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the module in training", "strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp,", "instantiating a ResNet in PyTorch. Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging", "empirical approximation for initialization the vMF # distributions for each class in the", "stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1,", "for details of their behaviors in training/evaluation mode, if they are affected, e.g.", "the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False)", "is None: # Each element in the tuple indicates if we should replace", "None previous_dilation = self.dilation if dilate: self.dilation *= stride stride = 1 if", "ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return", "vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet", "self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1)", "return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, 
kappa_confidence,", "the License. # You may obtain a copy of the License at #", "nn from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch", "# Fixes batch-norm to eval mode during training self.set_bn_eval = set_bn_eval # Make", "stride=1, dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation = self.dilation if dilate:", ") if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress)", "= load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim,", "width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else:", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "be None \" \"or a 3-element tuple, got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width", "self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion for _ in", "mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for", "blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1])", "nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules(): if 
isinstance(m,", "nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is the empirical", "layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self):", "{}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3,", "= nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3,", "training mode. This has any effect only on certain modules. See documentations of", ")) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1,", "resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion), ) layers = [] layers.append(", "= self._norm_layer downsample = None previous_dilation = self.dilation if dilate: self.dilation *= stride", "= self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3", "!= planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes", "kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning for Image Recognition\".\"\"\" return", "\"\"\"Class for instantiating a ResNet in PyTorch. 
Code adapted from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\"", "blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample = None previous_dilation = self.dilation if", "nn.BatchNorm2d): module.eval() return self def _resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim,", "planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential(", "with the License. # You may obtain a copy of the License at", "init_temp, kappa_confidence, ): \"\"\"Instantiates a ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval,", "output of ResNet # to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias)", "self).__init__(embedding_dim, n_classes, use_vmf, learn_temp, init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer =", "from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch):", "512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval mode during", "* block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval mode during training", "layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url( resnet.model_urls[arch_name],", "of their behaviors in training/evaluation mode, if they are affected, e.g. 
:class:`Dropout`, :class:`BatchNorm`,", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes, embedding_dim, set_bn_eval,", "block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules():", "a dilated convolution instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) != 3:", "self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64,", "self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64,", "self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512, layers[3], stride=2,", "_ in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, ))", "self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block, 512,", "self._make_layer( block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in", "torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs", "replace # the 2x2 stride with a dilated convolution instead replace_stride_with_dilation = [False,", "nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is the empirical approximation for initialization", "law or agreed to in writing, software # distributed under the License is", "pretrained\") state_dict = 
load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50(", "the License for the specific language governing permissions and # limitations under the", "self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(),", "stride stride = 1 if stride != 1 or self.inplanes != planes *", "= nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2", "has any effect only on certain modules. See documentations of particular modules for", "\"\"\" self.training = mode for module in self.children(): module.train(mode) if self.set_bn_eval: for module", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "convolution instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) != 3: raise ValueError(", "isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m in self.modules():", "n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model", "first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual Learning", "self.groups = groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes,", "defining a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp,", "self.n_classes, bias=False) if self.use_vmf: # This is the empirical approximation for initialization the", "\"\"\"Class for defining a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim, set_bn_eval, 
first_conv_3x3,", "element in the tuple indicates if we should replace # the 2x2 stride", "set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50", "limitations under the License. \"\"\"Class for instantiating a ResNet in PyTorch. Code adapted", "if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1", "block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes * block.expansion, stride), norm_layer(planes * block.expansion), )", "self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\",", ") layers = [] layers.append( block( self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation,", "3: raise ValueError( \"replace_stride_with_dilation should be None \" \"or a 3-element tuple, got", "got {}\".format(replace_stride_with_dilation)) self.groups = groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d(", "permissions and # limitations under the License. \"\"\"Class for instantiating a ResNet in", "= None previous_dilation = self.dilation if dilate: self.dilation *= stride stride = 1", "any effect only on certain modules. See documentations of particular modules for details", "in compliance with the License. # You may obtain a copy of the", "def train(self, mode=True): \"\"\"Sets the module in training mode. This has any effect", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "their behaviors in training/evaluation mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc.", "2021 The vMF Embeddings Authors. 
# # Licensed under the Apache License, Version", "approximation for initialization the vMF # distributions for each class in the final", "dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer( block,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "behaviors in training/evaluation mode, if they are affected, e.g. :class:`Dropout`, :class:`BatchNorm`, etc. Args:", "language governing permissions and # limitations under the License. \"\"\"Class for instantiating a", "for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet( arch_name,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining", "block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval mode during training self.set_bn_eval", "layer that remaps from the backbone output of ResNet # to the embedding", "train(self, mode=True): \"\"\"Sets the module in training mode. This has any effect only", "class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\" def __init__( self, n_classes, embedding_dim,", "state_dict = load_state_dict_from_url( resnet.model_urls[arch_name], progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes,", "Copyright 2021 The vMF Embeddings Authors. 
# # Licensed under the Apache License,", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False):", "in self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval()", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf: # This is", ":class:`BatchNorm`, etc. Args: mode: whether to set training mode (\"True\") or evaluation mode", "evaluation mode (\"False\"). Returns: self \"\"\" self.training = mode for module in self.children():", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3],", "load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class", "self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 =", "stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion for", "stride != 1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes,", "isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0)", ") 
def train(self, mode=True): \"\"\"Sets the module in training mode. This has any", "return self def _resnet( arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3,", "ResNet model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence,", "self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm to eval", "= 1 if replace_stride_with_dilation is None: # Each element in the tuple indicates", "nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init", "eval mode during training self.set_bn_eval = set_bn_eval # Make first convolution use a", "from: https://github.com/jeromerony/dml_cross_entropy/blob/master/models/base_model.py https://github.com/jeromerony/dml_cross_entropy/blob/master/models/architectures/resnet.py \"\"\" import logging import torch.nn as nn from torchvision.models import", "elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) # Zero-init for m in", "mode for module in self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules(): if", "import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for defining a ResNet architecture.\"\"\"", "in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode=\"fan_out\", nonlinearity=\"relu\") elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): nn.init.constant_(m.weight,", "logging import torch.nn as nn from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url", "progress=progress) model.load_state_dict(state_dict, strict=False) model.create_encoder() return model def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, 
first_conv_3x3,", "__init__( self, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1,", "!= 1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential( resnet.conv1x1(self.inplanes, planes", "should replace # the 2x2 stride with a dilated convolution instead replace_stride_with_dilation =", "norm_layer(planes * block.expansion), ) layers = [] layers.append( block( self.inplanes, planes, stride, downsample,", "Version 2.0 (the \"License\"); # you may not use this file except in", "nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier,", "except in compliance with the License. # You may obtain a copy of", "arch_name, block, layers, pretrained, progress, n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence,", "from torchvision.models import resnet from torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from", "embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading", "self.training = mode for module in self.children(): module.train(mode) if self.set_bn_eval: for module in", "nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer = self._norm_layer downsample", "self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu", "from \"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4,", "module.train(mode) if self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self", "self.inplanes, planes, groups=self.groups, 
base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder =", "pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ): \"\"\"ResNet-50 model from \"Deep Residual", "# This is the empirical approximation for initialization the vMF # distributions for", "planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer, )) self.inplanes = planes * block.expansion", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "= self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block,", "m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0)", "self.children(): module.train(mode) if self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return", "replace_stride_with_dilation is None: # Each element in the tuple indicates if we should", "block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. 
See arguments", "self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 =", "self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the", "replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should", "self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) self.bn1 = self._norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool =", "dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier = nn.Linear(self.embedding_dim, self.n_classes, bias=False) if self.use_vmf:", "if len(replace_stride_with_dilation) != 3: raise ValueError( \"replace_stride_with_dilation should be None \" \"or a", "self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self,", "def resnet50( n_classes, embedding_dim, set_bn_eval, pretrained, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, progress=False, ):", "replace_stride_with_dilation=None, ): \"\"\"Initializes a ResNet architecture object. See arguments in arch.py.\"\"\" super(ResNet, self).__init__(embedding_dim,", "training mode (\"True\") or evaluation mode (\"False\"). Returns: self \"\"\" self.training = mode", "Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck, [3, 4, 6, 3], pretrained, progress, n_classes,", "torchvision.models.utils import load_state_dict_from_url from vmf_embeddings.archs import arch from vmf_embeddings.archs import utils log =", "(\"False\"). 
Returns: self \"\"\" self.training = mode for module in self.children(): module.train(mode) if", "model.\"\"\" model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block,", "# limitations under the License. \"\"\"Class for instantiating a ResNet in PyTorch. Code", "ResNet # to the embedding dimensionality self.remap = nn.Linear(self.backbone_features, self.embedding_dim) nn.init.zeros_(self.remap.bias) self.classifier =", "stride=2, dilate=replace_stride_with_dilation[0]) self.layer3 = self._make_layer( block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) self.layer4 = self._make_layer(", "model = ResNet( n_classes, embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers,", "nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer( block, 128,", "in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def", "first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, ) if pretrained: log.info(\"Loading ResNet50 from", "batch-norm to eval mode during training self.set_bn_eval = set_bn_eval # Make first convolution", "self.kappa_confidence, self.embedding_dim) self.inplanes = 64 self.dilation = 1 if replace_stride_with_dilation is None: #", "self.dilation = 1 if replace_stride_with_dilation is None: # Each element in the tuple", "in range(1, blocks): layers.append( block( self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, )) return", "kernel_size=3, stride=1, padding=1, bias=False) else: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=7, stride=2, padding=3,", "groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer, 
)) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1,", "embedding_dim, set_bn_eval, first_conv_3x3, use_vmf, learn_temp, init_temp, kappa_confidence, block, layers, groups=1, width_per_group=64, replace_stride_with_dilation=None, ):", "stride=2, dilate=replace_stride_with_dilation[2]) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) for m in self.modules(): if isinstance(m, nn.Conv2d):", ")) self.inplanes = planes * block.expansion for _ in range(1, blocks): layers.append( block(", "stride = 1 if stride != 1 or self.inplanes != planes * block.expansion:", "mode (\"False\"). Returns: self \"\"\" self.training = mode for module in self.children(): module.train(mode)", "self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def", "mode: whether to set training mode (\"True\") or evaluation mode (\"False\"). Returns: self", "Zero-init for m in self.modules(): if isinstance(m, resnet.Bottleneck): nn.init.constant_(m.bn3.weight, 0) elif isinstance(m, resnet.BasicBlock):", "import arch from vmf_embeddings.archs import utils log = logging.getLogger(\"main\") class ResNet(arch.Arch): \"\"\"Class for", "self.layer3, self.layer4, self.avgpool, utils.Flatten(), self.remap, self.classifier, ) def train(self, mode=True): \"\"\"Sets the module", "\"\"\"ResNet-50 model from \"Deep Residual Learning for Image Recognition\".\"\"\" return _resnet( \"resnet50\", resnet.Bottleneck,", "for the specific language governing permissions and # limitations under the License. 
\"\"\"Class", "from the backbone output of ResNet # to the embedding dimensionality self.remap =", "init_temp, kappa_confidence) self.backbone_features = 512 * block.expansion self._norm_layer = nn.BatchNorm2d # Fixes batch-norm", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "groups self.base_width = width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1,", "dilated convolution instead replace_stride_with_dilation = [False, False, False] if len(replace_stride_with_dilation) != 3: raise", "module in training mode. This has any effect only on certain modules. See", "details of their behaviors in training/evaluation mode, if they are affected, e.g. :class:`Dropout`,", "block, layers, ) if pretrained: log.info(\"Loading ResNet50 from Pytorch pretrained\") state_dict = load_state_dict_from_url(", "create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu, self.maxpool, self.layer1, self.layer2, self.layer3, self.layer4, self.avgpool,", "= width_per_group if self.first_conv_3x3: self.conv1 = nn.Conv2d( 3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)", "remaps from the backbone output of ResNet # to the embedding dimensionality self.remap", "first convolution use a 3x3 kernel for CIFAR datasets self.first_conv_3x3 = first_conv_3x3 #", "dilation=self.dilation, norm_layer=norm_layer, )) return nn.Sequential(*layers) def create_encoder(self): self.encoder = nn.Sequential( self.conv1, self.bn1, self.relu,", "isinstance(m, resnet.BasicBlock): nn.init.constant_(m.bn2.weight, 0) def _make_layer(self, block, planes, blocks, stride=1, dilate=False): norm_layer =", "self.set_bn_eval: for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.eval() return self def _resnet(", "See documentations of particular modules for details of their behaviors in training/evaluation mode," ]
[ "except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self):", "None') self._loop = loop if not callback: raise ValueError(f'callback is None') self._callback =", "no data received within device_timeout period \"\"\" l_now = get_sec() if (l_now -", "platform.system() == 'Linux': from .scanner_linux import scanner as _scanner from .ruuvitag_misc import hex_string,", "self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid}", "_shell_cmd(self, *, cmd): if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell(", "await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device}", "asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # ------------------------------------------------------------------------------- def stop(self): logger.info(f'>>> bleak') self._stopevent.set()", "- self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired')", "if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await", "None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids = mfids", "asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system() == 'Linux':", "\"\"\" Supervises reception of the bleak data Restarts socket if no data 
received", "except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises", "import logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from datetime", "in list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>>", "# coding=utf-8 # !/usr/bin/python3 # Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic", "device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address,", "asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while", "in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''')", "received data from the Bleak scanner \"\"\" if not data: return self._data_ts =", "= await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}')", "l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>>", "None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset}", "'/bin/hciconfig' #------------------------------------------------------------------------------- def 
__init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ):", "async def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises reception of the bleak", "self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds =", "not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds", "= asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise", "import datetime as _dt, timedelta as _td import platform if platform.system() == 'Windows':", "for hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler:", "{self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------", "l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES,", "else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>>", "timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci device nodata checking \"\"\"", "self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # 
------------------------------------------------------------------------------- def stop(self):", "import hex_string, get_sec from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES =", "try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except:", "def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down')", "logger.exception(f'>>> exception') break # l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2)", "self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not", "exception') pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...')", "GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>>", "data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids: l_mfdata", "{ 'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing = True, max_instances", "coding=utf-8 # !/usr/bin/python3 # Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient", "next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') 
#------------------------------------------------------------------------------- async def", "down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async", "# l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed')", "= None self._scanner_task = None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return", "l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac", "as _dt, timedelta as _td import platform if platform.system() == 'Windows': from .scanner_windows", "datetime import datetime as _dt, timedelta as _td import platform if platform.system() ==", "# =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*,", "asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout = device_timeout", "device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is None')", "= 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None,", "= l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: 
logger.info(f'>>> jobid:{jobid} restarting device:{self._device}')", "= l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData(", "def _schedule(self): \"\"\" Initializes scheduler for hci device nodata checking \"\"\" logger.debug(f'>>> enter", "logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}')", ")) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def", "logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False", "jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False ):", "platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK #", "= True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>>", "self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system() == 'Linux': logger.info(f'>>>", "# ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' 
#------------------------------------------------------------------------------- def _schedule(self):", "Copyright: (c) 2019 TK # Licence: MIT # # sudo apt install bluez", "socket if no data received within device_timeout period \"\"\" l_now = get_sec() if", "replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) )", "apt install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger =", "def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue,", "device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task = None", "scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout = device_timeout self._device = device", "l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout:", "scheduler for hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not", "exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event()", "with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True #", "logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is None') self._loop = loop if", "# Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by <NAME> #", "try: await self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid = l_mfid, mfdata", "\"\"\" Handles received 
data from the Bleak scanner \"\"\" if not data: return", "*, cmd): if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd,", "self._device_reset = device_reset self._device_timeout = device_timeout self._device = device self._data_ts = 0 self._inqueue", "aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git #", "Handles received data from the Bleak scanner \"\"\" if not data: return self._data_ts", "logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop =", "logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self,", "_schedule(self): \"\"\" Initializes scheduler for hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)}", "stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles received data", "raise ValueError(f'callback is None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler = scheduler", "jobid, reset=False ): \"\"\" Supervises reception of the bleak data Restarts socket if", "try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs = { 'jobid': l_jobid, 'reset':", "mfdata = l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>>", "# ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task =", "device:{self._device}') try: self._reset() self._scanner_task = 
self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass", "self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting", ".ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig'", "_dt, timedelta as _td import platform if platform.system() == 'Windows': from .scanner_windows import", "mac = data.address, rssi = data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata", "break except: logger.exception(f'>>> exception') break # l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set()", "Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence: MIT", "Bluetooth Low Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c)", "loop: raise ValueError(f'loop is None') self._loop = loop if not callback: raise ValueError(f'callback", "cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}')", "ValueError(f'callback is None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids", "logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>>", "self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try:", "------------------------------------------------------------------------------ 
async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD}", "self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try:", "is None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids =", "l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try:", "l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout)", "the Bleak scanner \"\"\" if not data: return self._data_ts = get_sec() try: l_mdata", "stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout:", "#------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>>", "from contextlib import suppress from datetime import datetime as _dt, timedelta as _td", "rawdata = data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass #", "mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci device nodata", "def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises reception of the bleak data", "{l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # 
------------------------------------------------------------------------------ async def _handle_data(self, *, data):", "scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop: raise", "{type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job(", "= self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except:", "if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\"", "requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio from", "self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not", "TK # Licence: MIT # # sudo apt install bluez # requires bluez", "self._mfids = mfids self._device_reset = device_reset self._device_timeout = device_timeout self._device = device self._data_ts", "or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)}", "scanner as _scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData #", "loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not", "await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # 
------------------------------------------------------------------------------ async def _shell_cmd(self, *,", "data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata = data )) except: logger.exception(f'>>>", "l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass", "\"\"\" Initializes scheduler for hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}')", "period \"\"\" l_now = get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts =", "# !/usr/bin/python3 # Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by", "expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue,", "(self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break", "exception') raise while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await", "if (l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer", "= data.address, rssi = data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata =", "<NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence: MIT # #", "logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset()", "*, data): \"\"\" Handles received data from the Bleak scanner \"\"\" if not", "self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid}", "Bleak 
scanner \"\"\" if not data: return self._data_ts = get_sec() try: l_mdata =", "------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device,", "stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if (self._inqueue): await", "except: logger.exception(f'>>> exception') break # l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await", "raise ValueError(f'loop is None') self._loop = loop if not callback: raise ValueError(f'callback is", "logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule()", "Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK", "await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # ------------------------------------------------------------------------------- def stop(self): logger.info(f'>>> bleak')", "l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles", "logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if", "install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag')", "= True, next_run_time = 
_dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #-------------------------------------------------------------------------------", "self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>>", "{l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------", "from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD =", "bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import", "device_timeout period \"\"\" l_now = get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts", "Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git", "#------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci device nodata checking \"\"\" logger.debug(f'>>>", "return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if", "_scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData # =============================================================================== class", "{self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if", "): \"\"\" Supervises reception of the bleak data Restarts socket if 
no data", "except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel() #", "try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop))", "scanner as _scanner elif platform.system() == 'Linux': from .scanner_linux import scanner as _scanner", "{self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #-------------------------------------------------------------------------------", "await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system() ==", "self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set()", "self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break #", "self._device = device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task", "as _scanner elif platform.system() == 'Linux': from .scanner_linux import scanner as _scanner from", "timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device,", "self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>>", "self._schedule() 
except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await", "Licence: MIT # # sudo apt install bluez # requires bluez 5.43 #", "self._callback = callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset", "Initializes scheduler for hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if", "data received within device_timeout period \"\"\" l_now = get_sec() if (l_now - self._data_ts)", "kwargs = { 'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing =", "timedelta as _td import platform if platform.system() == 'Windows': from .scanner_windows import scanner", "import platform if platform.system() == 'Windows': from .scanner_windows import scanner as _scanner elif", "hci device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return", "rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address, rssi =", "# requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio", "reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci device nodata checking", "**kwargs ): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is None') self._loop =", "= '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs", 
"logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>>", "l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def", "for l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids: l_mfdata =", "if platform.system() == 'Windows': from .scanner_windows import scanner as _scanner elif platform.system() ==", "break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel()", "# ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}')", "self._device_timeout = device_timeout self._device = device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop", "self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel() # with suppress(asyncio.CancelledError):", "raise while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100)", "if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs", "callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop:", "self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() #", "= await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, 
stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r}", "while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except", "await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited", "outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if (self._inqueue):", "__repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler", "coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}')", "True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async", "outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def", "# ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await", "data): \"\"\" Handles received data from the Bleak scanner \"\"\" if not data:", "except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except 
asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except:", "exception') break # l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>>", "# sudo apt install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import logging", "scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\"", "self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------", "device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await", "logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from datetime import datetime as _dt,", "list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device}", "device nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if", "------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc", "await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except", "of the bleak data Restarts socket if no data received within 
device_timeout period", "5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib import", "device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task =", "Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence:", "!/usr/bin/python3 # Name: aioruuvitag_bleak - Bluetooth Low Energy platform Agnostic Klient by <NAME>", "ValueError(f'loop is None') self._loop = loop if not callback: raise ValueError(f'callback is None')", "self._scheduler = scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout = device_timeout self._device", "\"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid =", "): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is None') self._loop = loop", "callback: raise ValueError(f'callback is None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler =", "starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except:", "import suppress from datetime import datetime as _dt, timedelta as _td import platform", "asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel() # with", "from .scanner_linux import scanner as _scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data", "= l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time =", "None self._scanner_task = None logger.info(f'>>> {self} 
initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak", "# Licence: MIT # # sudo apt install bluez # requires bluez 5.43", "= logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from datetime import datetime as", "from datetime import datetime as _dt, timedelta as _td import platform if platform.system()", "l_now = get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>>", "self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD}", "self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd):", "------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from", "bleak data Restarts socket if no data received within device_timeout period \"\"\" l_now", "await self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid = l_mfid, mfdata =", "await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError')", "def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes", "Low Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019", "loop=self._loop, outqueue=self._inqueue, 
stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async", "pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await", "= l_mfid, mfdata = l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception') pass", "== 'Linux': from .scanner_linux import scanner as _scanner from .ruuvitag_misc import hex_string, get_sec", "_do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises reception of the bleak data Restarts", "the bleak data Restarts socket if no data received within device_timeout period \"\"\"", "mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address, rssi", "async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop,", "}, id = l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True,", "by <NAME> # https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence: MIT #", "if not data: return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid", "get_sec from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD", "'interval', seconds = 1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset }, id", "self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break 
except asyncio.CancelledError:", "=============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop,", "callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset = device_reset", "jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises reception of", "jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset:", "max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled')", "= l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception')", "data from the Bleak scanner \"\"\" if not data: return self._data_ts = get_sec()", "suppress from datetime import datetime as _dt, timedelta as _td import platform if", "*, jobid, reset=False ): \"\"\" Supervises reception of the bleak data Restarts socket", "asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def", "not data: return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in", "except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop", "jobid:{jobid} device_timeout timer 
({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task", "nodata checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout:", "True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce = True, next_run_time = _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid}", "= get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid}", "= scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout = device_timeout self._device =", "self._scanner_task = None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device}", "stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self):", "Restarts socket if no data received within device_timeout period \"\"\" l_now = get_sec()", "platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout,", "platform if platform.system() == 'Windows': from .scanner_windows import scanner as _scanner elif platform.system()", "{cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate()", "self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> 
jobid:{jobid}')", "= 1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid,", "up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self, *, cmd): if platform.system()", "self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self} initialized') #", "({self._device_timeout}sec) expired') try: logger.info(f'>>> jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop,", "mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is", "mfid = l_mfid, mfdata = l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception')", "not callback: raise ValueError(f'callback is None') self._callback = callback self._stopevent = asyncio.Event() self._scheduler", "Supervises reception of the bleak data Restarts socket if no data received within", "f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs = { 'jobid': l_jobid,", "suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # -------------------------------------------------------------------------------", "self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>>", "self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception')", "def _shell_cmd(self, *, cmd): if platform.system() == 'Linux': 
logger.info(f'>>> {cmd!r}') l_proc = await", "mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid =", "= asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self} initialized') # -------------------------------------------------------------------------------", "device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout,", "from .scanner_windows import scanner as _scanner elif platform.system() == 'Linux': from .scanner_linux import", "break # l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak", "= _dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self,", "asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with", "run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop))", "(l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec)", "'Windows': from .scanner_windows import scanner as _scanner elif platform.system() == 'Linux': from .scanner_linux", "if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # 
------------------------------------------------------------------------------ async", "cmd): if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE,", "filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid = l_mfid,", "self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout", "rssi = data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata = data ))", "stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *,", "asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break", "self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset", "'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr =", "try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid", "sudo apt install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import logging logger", "self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs =", "data.address, rssi = data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata = data", "pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try:", "mfids 
self._device_reset = device_reset self._device_timeout = device_timeout self._device = device self._data_ts = 0", "== 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr", "async def _handle_data(self, *, data): \"\"\" Handles received data from the Bleak scanner", "import scanner as _scanner elif platform.system() == 'Linux': from .scanner_linux import scanner as", "import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #-------------------------------------------------------------------------------", "ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None,", "l_task.cancel() # with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return", "l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata,", "self._device_reset }, id = l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce =", "#------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False ): \"\"\" Supervises reception of the", "l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>>", "logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid, reset=False 
): \"\"\" Supervises reception", "= None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids}", "asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def _shell_cmd(self,", "try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit')", "get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids or", "'reset': self._device_reset }, id = l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, coalesce", "except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0)", "logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0)", "_td import platform if platform.system() == 'Windows': from .scanner_windows import scanner as _scanner", "contextlib import suppress from datetime import datetime as _dt, timedelta as _td import", "is None') self._loop = loop if not callback: raise ValueError(f'callback is None') self._callback", "0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self} initialized')", "import asyncio from contextlib import suppress from datetime import datetime as _dt, timedelta", "id = l_jobid, replace_existing = True, max_instances = self.SCHEDULER_MAX_INSTANCES, 
coalesce = True, next_run_time", "scanner \"\"\" if not data: return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data']", "= mfids self._device_reset = device_reset self._device_timeout = device_timeout self._device = device self._data_ts =", "device_timeout self._device = device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop = None", "self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self):", "self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try:", "elif platform.system() == 'Linux': from .scanner_linux import scanner as _scanner from .ruuvitag_misc import", "logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel() # with suppress(asyncio.CancelledError): #", "== 'Windows': from .scanner_windows import scanner as _scanner elif platform.system() == 'Linux': from", "if not callback: raise ValueError(f'callback is None') self._callback = callback self._stopevent = asyncio.Event()", "= self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set():", "# Copyright: (c) 2019 TK # Licence: MIT # # sudo apt install", "loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if", "SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, 
device='hci0',", "= f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs = { 'jobid':", "= asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset = device_reset self._device_timeout =", "logger.error(f'>>> stder: {l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles received", "device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop is None') self._loop", "= data.rssi, mfid = l_mfid, mfdata = l_mfdata, rawdata = data )) except:", "initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def", "l_mfid, mfdata = l_mfdata, rawdata = data )) except: logger.exception(f'>>> exception') pass except:", "async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device}", "= device_timeout self._device = device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop =", "------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles received data from the Bleak", "= callback self._stopevent = asyncio.Event() self._scheduler = scheduler self._mfids = mfids self._device_reset =", "self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0)", "await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with 
{l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if", "= data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass # -------------------------------------------------------------------------------", "self._loop = loop if not callback: raise ValueError(f'callback is None') self._callback = callback", "device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci device", "return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for", "data Restarts socket if no data received within device_timeout period \"\"\" l_now =", "enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try:", "pass # ------------------------------------------------------------------------------- async def run(self): logger.info(f'>>> starting...') try: self._scanner_stop = asyncio.Event() self._scanner_task", "logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await", "# ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles received data from the", "class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback,", "mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await 
self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi,", "logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from datetime import", "def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}')", "= device self._data_ts = 0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task =", "exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set()", "{cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder:", "await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear() # ------------------------------------------------------------------------------ async def", "logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else:", "l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr:", "if not loop: raise ValueError(f'loop is None') self._loop = loop if not callback:", "> self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout timer ({self._device_timeout}sec) expired') try: logger.info(f'>>>", "except: logger.exception(f'>>> exception') raise while not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get())", "if not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval',", "if 
no data received within device_timeout period \"\"\" l_now = get_sec() if (l_now", "bluez 5.43 # ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib", "l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await", "{l_stderr.decode()}') # ------------------------------------------------------------------------------ async def _handle_data(self, *, data): \"\"\" Handles received data from", "------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\"", "reception of the bleak data Restarts socket if no data received within device_timeout", ".scanner_windows import scanner as _scanner elif platform.system() == 'Linux': from .scanner_linux import scanner", "logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid = f'bleak_timeout'", "await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up') await asyncio.sleep(1.0) self._scanner_stop.clear()", "l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid]", "# with suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True", "CanceledError') break except: logger.exception(f'>>> exception') break # l_task.cancel() # with 
suppress(asyncio.CancelledError): # self._loop.run_until_complete(l_task)", "reset=False ): \"\"\" Supervises reception of the bleak data Restarts socket if no", "# https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence: MIT # # sudo", "import scanner as _scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData", "data: return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()):", "MIT # # sudo apt install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------", "from .ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object):", "with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}') #", "= get_sec() try: l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids", "within device_timeout period \"\"\" l_now = get_sec() if (l_now - self._data_ts) > self._device_timeout:", "jobid:{jobid} restarting device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>>", "as _td import platform if platform.system() == 'Windows': from .scanner_windows import scanner as", "2019 TK # Licence: MIT # # sudo apt install bluez # requires", "logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi} mfid:{l_mfid} mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac =", ".scanner_linux import scanner as _scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data import", "_scanner elif platform.system() == 'Linux': from 
.scanner_linux import scanner as _scanner from .ruuvitag_misc", "= data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid in self._mfids:", "HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0,", "as _scanner from .ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData # ===============================================================================", "datetime as _dt, timedelta as _td import platform if platform.system() == 'Windows': from", "= loop if not callback: raise ValueError(f'callback is None') self._callback = callback self._stopevent", "stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) l_stdout, l_stderr = await l_proc.communicate() logger.info(f'>>> {cmd!r} exited with {l_proc.returncode}') if", "_dt.now()+_td(seconds=self._device_timeout) ) logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *,", "= device_reset self._device_timeout = device_timeout self._device = device self._data_ts = 0 self._inqueue =", "platform.system() == 'Windows': from .scanner_windows import scanner as _scanner elif platform.system() == 'Linux':", "checking \"\"\" logger.debug(f'>>> enter {type(self._scheduler)} device_timeout:{self._device_timeout}') if not self._scheduler: return if self._device_timeout: l_jobid", "self._do_bleak_timeout, 'interval', seconds = 1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset },", "self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # ------------------------------------------------------------------------------- def stop(self): logger.info(f'>>>", "restarting 
device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception')", "device_reset self._device_timeout = device_timeout self._device = device self._data_ts = 0 self._inqueue = asyncio.Queue()", "1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing", "exited with {l_proc.returncode}') if l_stdout: logger.debug(f'>>> stdout: {l_stdout.decode()}') if l_stderr: logger.error(f'>>> stder: {l_stderr.decode()}')", "asyncio from contextlib import suppress from datetime import datetime as _dt, timedelta as", "https://github.com/hbldh/bleak.git # Copyright: (c) 2019 TK # Licence: MIT # # sudo apt", "l_mdata = data.metadata['manufacturer_data'] for l_mfid in list(l_mdata.keys()): if not self._mfids or l_mfid in", "'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing = True, max_instances =", "hex_string, get_sec from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES = 5", "not self._stopevent.is_set(): try: if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit:", "# self._loop.run_until_complete(l_task) self._scanner_stop.set() await asyncio.sleep(0.2) logger.info('>>> bleak completed') return True # ------------------------------------------------------------------------------- def", "- Bluetooth Low Energy platform Agnostic Klient by <NAME> # https://github.com/hbldh/bleak.git # Copyright:", "if (self._inqueue): await self._handle_data(data=await self._inqueue.get()) else: await asyncio.sleep(100) except GeneratorExit: logger.error(f'>>> GeneratorExit') self._stopevent.set()", "async def _shell_cmd(self, *, cmd): if platform.system() == 'Linux': logger.info(f'>>> 
{cmd!r}') l_proc =", "logger.error(f'>>> GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception')", "\"\"\" if not data: return self._data_ts = get_sec() try: l_mdata = data.metadata['manufacturer_data'] for", "try: self._scanner_stop = asyncio.Event() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) self._schedule() except: logger.exception(f'>>>", "received within device_timeout period \"\"\" l_now = get_sec() if (l_now - self._data_ts) >", "seconds = 1, kwargs = { 'jobid': l_jobid, 'reset': self._device_reset }, id =", "\"\"\" l_now = get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now", "l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1, kwargs = {", "loop if not callback: raise ValueError(f'callback is None') self._callback = callback self._stopevent =", ".ruuvitag_misc import hex_string, get_sec from .ble_data import BLEData # =============================================================================== class ruuvitag_bleak(object): SCHEDULER_MAX_INSTANCES", "data )) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> exception') pass # ------------------------------------------------------------------------------- async", "device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if not loop: raise ValueError(f'loop", "= self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') #", "BLEData # =============================================================================== class ruuvitag_bleak(object): 
SCHEDULER_MAX_INSTANCES = 5 HCICONFIG_CMD = '/bin/hciconfig' #------------------------------------------------------------------------------- def", "__init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False, device_timeout=10.0, **kwargs ): logger.info(f'>>> device:{device}') if", "_handle_data(self, *, data): \"\"\" Handles received data from the Bleak scanner \"\"\" if", "device:{device}') if not loop: raise ValueError(f'loop is None') self._loop = loop if not", "except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>> jobid:{jobid}') # ------------------------------------------------------------------------------ async def _reset(self): logger.debug(f'>>>", "from the Bleak scanner \"\"\" if not data: return self._data_ts = get_sec() try:", "get_sec() if (l_now - self._data_ts) > self._device_timeout: self._data_ts = l_now logger.warning(f'>>> jobid:{jobid} device_timeout", "not loop: raise ValueError(f'loop is None') self._loop = loop if not callback: raise", "# # sudo apt install bluez # requires bluez 5.43 # ------------------------------------------------------------------------------ import", "if not self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address}", "return if self._device_timeout: l_jobid = f'bleak_timeout' try: self._scheduler.add_job( self._do_bleak_timeout, 'interval', seconds = 1,", "mflen:{len(l_mfdata)} mfdata:{hex_string(data=l_mfdata, filler='')}''') try: await self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid", "def _handle_data(self, *, data): \"\"\" Handles received data from the Bleak scanner \"\"\"", "# ------------------------------------------------------------------------------ import logging logger = logging.getLogger('ruuvitag') import asyncio from contextlib import suppress", "5 HCICONFIG_CMD = '/bin/hciconfig' 
#------------------------------------------------------------------------------- def __init__(self,*, loop, callback, scheduler=None, device='hci0', mfids=None, device_reset=False,", "'Linux': from .scanner_linux import scanner as _scanner from .ruuvitag_misc import hex_string, get_sec from", "self._callback(bledata=BLEData( mac = data.address, rssi = data.rssi, mfid = l_mfid, mfdata = l_mfdata,", ") logger.info(f'>>> jobid:{l_jobid} scheduled') except: logger.exception(f'>>> jobid:{l_jobid}') #------------------------------------------------------------------------------- async def _do_bleak_timeout(self, *, jobid,", "if platform.system() == 'Linux': logger.info(f'>>> {cmd!r}') l_proc = await asyncio.create_subprocess_shell( cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)", "_reset(self): logger.debug(f'>>> device:{self._device}') self._scanner_stop.set() await asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await", "= 0 self._inqueue = asyncio.Queue() self._scanner_stop = None self._scanner_task = None logger.info(f'>>> {self}", "f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}' #------------------------------------------------------------------------------- def _schedule(self): \"\"\" Initializes scheduler for hci", "GeneratorExit') self._stopevent.set() break except asyncio.CancelledError: self._stopevent.set() logger.warning(f'>>> CanceledError') break except: logger.exception(f'>>> exception') break", "not self._mfids or l_mfid in self._mfids: l_mfdata = l_mdata[l_mfid] logger.debug(f'''>>> device:{self._device} mac:{data.address} rssi:{data.rssi}", "asyncio.sleep(1.0) if self._device_reset: await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} down') await asyncio.sleep(1.0) await self._shell_cmd(cmd=f'{self.HCICONFIG_CMD} {self._device} up')", "logger.info(f'>>> jobid:{jobid} restarting 
device:{self._device}') try: self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except:", "self._reset() self._scanner_task = self._loop.create_task(_scanner(device=self._device, loop=self._loop, outqueue=self._inqueue, stopevent=self._scanner_stop)) except: logger.exception(f'>>> exception') pass except: logger.exception(f'>>>", "logger.info(f'>>> {self} initialized') # ------------------------------------------------------------------------------- def __repr__(self): return f'ruuvitag_bleak device:{self._device} mfids:{self._mfids} reset:{self._device_reset} timeout:{self._device_timeout}'", "= { 'jobid': l_jobid, 'reset': self._device_reset }, id = l_jobid, replace_existing = True,", "(c) 2019 TK # Licence: MIT # # sudo apt install bluez #", "logger = logging.getLogger('ruuvitag') import asyncio from contextlib import suppress from datetime import datetime" ]
[ ":: Python :: 3\", \"License :: OSI Approved :: MIT License\", \"Operating System", "\"Programming Language :: Python :: 3\", \"License :: OSI Approved :: MIT License\",", "System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ],", "# 'rex' : [ # 'models/*.pth' # ], # }, # include_package_data=True, )", "setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\",", ":: OSI Approved :: MIT License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\",", "\"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' : [ #", "# package_data={ # 'rex' : [ # 'models/*.pth' # ], # }, #", "os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__,", "import setuptools from rex import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\")", "more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python", "long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\",", "= fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for 
Relation Extraction and", "\"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' : [ # 'models/*.pth'", "classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved :: MIT", "as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for", "\"README.md\") with open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\",", "rex import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description", "\"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\",", "Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ #", "and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language ::", "\"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' : [ # 'models/*.pth' # ],", "\"tqdm==4.61.1\", ], # package_data={ # 'rex' : [ # 'models/*.pth' # ], #", "Approved :: MIT License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\",", "= os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\",", "import os import setuptools from rex import __version__ 
readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with", "setuptools from rex import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as", "Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language", ":: 3\", \"License :: OSI Approved :: MIT License\", \"Operating System :: OS", "], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex'", "\"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved", "License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\",", "3\", \"License :: OSI Approved :: MIT License\", \"Operating System :: OS Independent\",", "install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' : [", "url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\", \"License", "fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\",", "Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), 
classifiers=[ \"Programming", "readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup(", "open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A", "OSI Approved :: MIT License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[", "\"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' : [ # 'models/*.pth' #", "MIT License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\",", ":: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], #", "OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={", "__version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description = fh.read()", "long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction", "for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[", "\"License :: OSI Approved :: MIT License\", \"Operating System :: OS Independent\", ],", "fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", 
version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation", "author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\",", "long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python ::", "], # package_data={ # 'rex' : [ # 'models/*.pth' # ], # },", ":: MIT License\", \"Operating System :: OS Independent\", ], python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\",", "import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh: long_description =", "\"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI", "with open(readme_filepath, \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\",", "name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description,", "python_requires=\">=3.7\", install_requires=[ \"numpy>=1.19.0\", \"scikit-learn>=0.21.3\", \"omegaconf>=2.0.6\", \"loguru==0.5.3\", \"tqdm==4.61.1\", ], # package_data={ # 'rex' :", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", 
packages=setuptools.find_packages(exclude=[\"tests\",", "from rex import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath, \"r\") as fh:", "toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]),", "packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\", \"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\", \"License ::", "\"docs.*\"]), classifiers=[ \"Programming Language :: Python :: 3\", \"License :: OSI Approved ::", "os import setuptools from rex import __version__ readme_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\") with open(readme_filepath,", "package_data={ # 'rex' : [ # 'models/*.pth' # ], # }, # include_package_data=True,", "Language :: Python :: 3\", \"License :: OSI Approved :: MIT License\", \"Operating", "version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\",", "description=\"A toolkit for Relation Extraction and more...\", long_description_content_type=\"text/markdown\", long_description=long_description, url=\"https://github.com/Spico197/REx\", packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\", \"docs\",", "Python :: 3\", \"License :: OSI Approved :: MIT License\", \"Operating System ::", "\"r\") as fh: long_description = fh.read() setuptools.setup( name=\"pytorch-rex\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"A toolkit" ]
[ "kfdrc-research-study (derived from FHIR ResearchStudy). \"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import", "from common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table,", "= get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not", "study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if", "get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue", "\"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, }", "[ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ],", "not all((study_id, institution, investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE,", "\"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ {", "study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\",", "{ \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id,", "\"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": 
[ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\":", "short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"] =", "= { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ]", "common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id,", "if not all((study_id, institution, investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\":", "{ \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] },", "] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\",", "\"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution:", "make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id =", "{ \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\":", "attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } 
)", "= \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ): for row", "get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row,", "continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [", "\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE =", "eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row,", "groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group in groups.values() ] yield retval", "retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"] = [", "from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\"", "\"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, },", "\"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name, \"status\": \"completed\",", "kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def", "investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": {", "if short_name: retval[\"extension\"].append( { \"url\": 
\"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"]", "CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng,", "\"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\":", "], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": {", "CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name", "\"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [", "} ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, }", "\"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name, \"status\":", "target_service_id, organizations, practitioner_roles, groups ): for row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION,", "get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row,", "\"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name:", "} if attribution: 
retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name,", "CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name =", "], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' },", "= get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution =", "table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID)", "row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ):", "= get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)):", "CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION)", "study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\":", "converts Kids First studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). 
\"\"\" from", "): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME)", "in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id", "\"value\": study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\":", "[ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}'", "}, } ], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution,", "f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\":", "= get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue retval = {", "}, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\",", "CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME)", "CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row,", "CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, 
): study_id = get(row, CONCEPT.STUDY.ID) institution", "\"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ {", "study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name", "short_name, } ) if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group in", "{ \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\":", "\"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [", "\"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name,", "\"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id,", "} ], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}'", "], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if", "FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select, get", "\"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for", "table, target_service_id, organizations, practitioner_roles, groups ): for row in make_select( eng, table, CONCEPT.STUDY.ID,", "\"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\":", "\"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution})", "\"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if", "import CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies(", "\"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": {", "if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, }", "all((study_id, institution, investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id),", "short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue retval =", "for row in make_select( eng, table, CONCEPT.STUDY.ID, 
CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME,", "attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name,", "make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups", "\"\"\" This module converts Kids First studies to FHIR kfdrc-research-study (derived from FHIR", "\"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( {", "[ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, {", "= get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name =", "CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution,", ") if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group in groups.values() ]", "investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name", "{ \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' },", "\"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], }", "}, \"identifier\": [ { 
\"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\":", "{ \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append(", "eng, table, target_service_id, organizations, practitioner_roles, groups ): for row in make_select( eng, table,", "First studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). \"\"\" from kf_lib_data_ingest.common.concept_schema import", "\"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ {", "): for row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY,", "\"valueString\": short_name, } ) if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group", "from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select,", "organizations, practitioner_roles, groups ): for row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME,", "RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ): for", "CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION)", "\"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ): for row in", "groups ): for row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME,", "}, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\":", "= get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name =", "FHIR kfdrc-research-study (derived from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils", "CONCEPT.STUDY.ID) institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME)", "} ) if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group in groups.values()", "get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row,", "CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution = get(row,", "retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\"", "\"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\":", "practitioner_roles, groups ): for row in make_select( eng, table, CONCEPT.STUDY.ID, CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION,", "{ \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\":", "get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ):", "Kids First studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema", "import make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations,", "make_identifier, make_select, get RESOURCE_TYPE = \"ResearchStudy\" def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles,", "{ \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ],", "def yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ): for row in make_select(", "yield_kfdrc_research_studies( eng, table, target_service_id, organizations, practitioner_roles, groups ): for row in make_select( eng,", "\"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'}", "module converts Kids First studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). \"\"\"", "studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT", "This module converts Kids First studies to FHIR kfdrc-research-study (derived from FHIR ResearchStudy).", "CONCEPT.INVESTIGATOR.INSTITUTION, CONCEPT.INVESTIGATOR.NAME, CONCEPT.STUDY.ATTRIBUTION, CONCEPT.STUDY.SHORT_NAME, CONCEPT.STUDY.AUTHORITY, CONCEPT.STUDY.NAME, ): study_id = get(row, CONCEPT.STUDY.ID) institution =", "get(row, CONCEPT.STUDY.NAME) attribution = get(row, CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id,", "CONCEPT.STUDY.ATTRIBUTION) short_name = get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue retval", "investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\",", "to FHIR kfdrc-research-study (derived from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from", "{ \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups: retval[\"enrollment\"] = [ {\"reference\":", "f'Organization/{organizations[institution][\"id\"]}' }, } ], } ], \"title\": study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\":", "get(row, CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue retval = { \"resourceType\":", "institution, investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\":", "study_id, }, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\",", "\"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\": { \"reference\": f'Organization/{organizations[institution][\"id\"]}' }, } ],", "if groups: retval[\"enrollment\"] = [ {\"reference\": f'Group/{group[\"id\"]}'} for group in groups.values() ] yield", "retval[\"identifier\"].append({\"value\": attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if", "CONCEPT.STUDY.SHORT_NAME) if not all((study_id, institution, investigator_name, study_name)): continue retval = { \"resourceType\": RESOURCE_TYPE,", "(derived from FHIR ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier,", "target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\": [ { \"url\":", "[ { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, },", "RESOURCE_TYPE, \"id\": make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [", "attribution}) if short_name: retval[\"extension\"].append( { \"url\": \"http://fhir.kids-first.io/StructureDefinition/display-name\", \"valueString\": short_name, } ) if groups:", "\"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies\", \"value\": target_service_id, }, { \"system\": \"https://kf-api-dataservice.kidsfirstdrc.org/studies?external_id=\", \"value\": study_id, }, ], \"extension\":", "ResearchStudy). 
\"\"\" from kf_lib_data_ingest.common.concept_schema import CONCEPT from common.utils import make_identifier, make_select, get RESOURCE_TYPE", "make_identifier(RESOURCE_TYPE, study_id), \"meta\": { \"profile\": [ \"http://fhir.kids-first.io/StructureDefinition/kfdrc-research-study\" ] }, \"identifier\": [ { \"system\":", "institution = get(row, CONCEPT.INVESTIGATOR.INSTITUTION) investigator_name = get(row, CONCEPT.INVESTIGATOR.NAME) study_name = get(row, CONCEPT.STUDY.NAME) attribution", "study_name, \"status\": \"completed\", \"principalInvestigator\": { \"reference\": f'PractitionerRole/{practitioner_roles[(institution, investigator_name)][\"id\"]}' }, } if attribution: retval[\"identifier\"].append({\"value\":", "}, ], \"extension\": [ { \"url\": \"http://fhir.kids-first.io/StructureDefinition/related-organization\", \"extension\": [ { \"url\": \"organization\", \"valueReference\":" ]
[ "to do something with the actual data. Usually - this is sending to", "json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock = threading.Lock()", "successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by", "self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes > max_queue_size %d bytes\", queue_size,", "True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout,", "self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success()", "= True sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker", "logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to", "flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time", "timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def", "timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def queue(self, event_type, data, flush=False): with", "Usually - this is sending to a server \"\"\" raise NotImplementedError def close(self):", "\"\"\" Success handler called by the transport on successful send \"\"\" self.state.set_success() def", "if hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception", "self.retry_number = -1 def should_try(self): if self.status == self.ONLINE: return True interval =", "a new Transport instance 
:param metadata: Metadata object to prepend to every queue", "queued_data.close() # StringIO on Python 2 does not have getbuffer, so we need", "\"\"\" Create a new Transport instance :param metadata: Metadata object to prepend to", "if self.status == self.ONLINE: return True interval = min(self.retry_number, 6) ** 2 return", "method.. \"\"\" async_mode = False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None,", "data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1", "failure back-off\") elif queued_data: fileobj = queued_data.fileobj # get a reference to the", "= Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property", "def __init__(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def should_try(self):", "data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace class Transport(object): \"\"\"", "with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(),", "get a reference to the fileobj before closing the gzip file queued_data.close() #", "= max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush =", "reference to the fileobj before closing the gzip file queued_data.close() # StringIO on", "to transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj # get a reference", "super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR = 0", "serializer to use for JSON encoding :param max_flush_time: Maximum time between flushes in", "time 
between flushes in seconds :param max_buffer_size: Maximum size of buffer before flush", "submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout =", "flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport = Transport def __init__(self,", "transport on send failure \"\"\" message = str(exception) logger.error(\"Failed to submit message: %r\",", "close(self): \"\"\" Cleans up resources and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def", "called by the transport on send failure \"\"\" message = str(exception) logger.error(\"Failed to", "self.state = TransportState() self._metadata = metadata if metadata is not None else {}", "= json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock =", "def should_try(self): if self.status == self.ONLINE: return True interval = min(self.retry_number, 6) **", ":param kwargs: \"\"\" self.state = TransportState() self._metadata = metadata if metadata is not", "self._compress_level = min(9, max(0, compress_level if compress_level is not None else 0)) self._json_serializer", "class AsyncTransport(Transport): async_mode = True sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport,", "need to override this to do something with the actual data. 
Usually -", "if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR = 0 def __init__(self):", "due to time since last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, )", "async_mode = False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ):", "if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing", "max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport instance :param metadata: Metadata", "to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout", "None self._counts = defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data =", "true, flushes the queue synchronously in the current thread :param start_flush_timer: set to", "elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def", "Success handler called by the transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self,", "from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from", "of the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data,", "= self.ONLINE self.last_check = None self.retry_number = -1 def did_fail(self): return self.status ==", "queue synchronously in the current thread :param start_flush_timer: set to True if the", "from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO 
logger", "self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by the transport on", "metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport instance", "import logging import threading import timeit from collections import defaultdict from elasticapm.contrib.async_worker import", "send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self,", "new Transport instance :param metadata: Metadata object to prepend to every queue :param", "GZip compress level. If zero, no GZip compression will be used :param json_serializer:", "self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def", "fall back to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if", "if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport =", "as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close()", "= min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check > interval def set_fail(self):", "from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None,", "elif queued_data: fileobj = queued_data.fileobj # get a reference to the fileobj before", "self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): 
self.worker.queue(self.send_sync, {\"data\":", "def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync: if true, flushes", "%.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size >", "self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return", "+ \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue", "else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time:", "not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush", "data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data):", "compression will be used :param json_serializer: serializer to use for JSON encoding :param", "elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes", "queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug(", "import timeit from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import", "\"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer =", "if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due to transport failure back-off\")", 
"something with the actual data. Usually - this is sending to a server", "sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync: if true, flushes the queue", "fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except", "and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time since last flush %.3fs", "Create a new Transport instance :param metadata: Metadata object to prepend to every", "self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes >", "import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\")", "%d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property", "and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e)", "self).__init__(*args, **kwargs) self._worker = None @property def worker(self): if not self._worker or not", "so we need to fall back to getvalue data = fileobj.getbuffer() if hasattr(fileobj,", "self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e)", "before closing the gzip file queued_data.close() # StringIO on Python 2 does not", "not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is None: self._queued_data", "min(9, max(0, compress_level if compress_level is not None else 0)) self._json_serializer = json_serializer", "def handle_transport_success(self, **kwargs): \"\"\" Success handler called by the transport on successful send", "def _stop_flush_timer(self): if 
self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True", "return True interval = min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check >", "flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size", "max_flush_time: Maximum time between flushes in seconds :param max_buffer_size: Maximum size of buffer", "1 ERROR = 0 def __init__(self): self.status = self.ONLINE self.last_check = None self.retry_number", "**kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property def worker(self): if not self._worker", "self.ONLINE: return True interval = min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check", "self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer()", "return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync: if", "the actual data. 
Usually - this is sending to a server \"\"\" raise", "None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due to transport failure", "None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data)", "or not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self,", "self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush", "queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self):", "compress level. If zero, no GZip compression will be used :param json_serializer: serializer", "this is sending to a server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans", "self._worker = AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except", "in seconds :param max_buffer_size: Maximum size of buffer before flush :param kwargs: \"\"\"", "handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by the transport on send failure", "threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def queue(self, event_type,", "the transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure", "data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - self._last_flush queue_size =", "0 if queued_data.fileobj is None else queued_data.fileobj.tell() if flush: 
logger.debug(\"forced flush\") self.flush() elif", "(self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush", "if true, flushes the queue synchronously in the current thread :param start_flush_timer: set", "should_try(self): if self.status == self.ONLINE: return True interval = min(self.retry_number, 6) ** 2", "message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout", "to a server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up resources and", "queue size %d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif", "self).__init__(message) self.data = data self.print_trace = print_trace class Transport(object): \"\"\" All transport implementations", "else {} self._compress_level = min(9, max(0, compress_level if compress_level is not None else", "is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush", "set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self): self.status", "close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR =", "this class You must implement a send method.. \"\"\" async_mode = False def", "gzip file queued_data.close() # StringIO on Python 2 does not have getbuffer, so", "queue :param sync: if true, flushes the queue synchronously in the current thread", ":param compress_level: GZip compress level. 
If zero, no GZip compression will be used", "return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e:", "def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property def worker(self):", "\"\"\" Cleans up resources and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self,", "size of buffer before flush :param kwargs: \"\"\" self.state = TransportState() self._metadata =", "self._metadata = metadata if metadata is not None else {} self._compress_level = min(9,", "\"\"\" message = str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True))", "): \"\"\" Create a new Transport instance :param metadata: Metadata object to prepend", "and not self.state.should_try(): logger.error(\"dropping flushed data due to transport failure back-off\") elif queued_data:", "self._flush_timer = None self._counts = defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock:", "**kwargs): \"\"\" Success handler called by the transport on successful send \"\"\" self.state.set_success()", "= None @property def worker(self): if not self._worker or not self._worker.is_alive(): self._worker =", "in the current thread :param start_flush_timer: set to True if the flush timer", "self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker:", "size %d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not", "= threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def queue(self,", "handle_transport_success(self, **kwargs): \"\"\" Success handler called by the transport on 
successful send \"\"\"", "data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not", "def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace", "bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with", "is not None else {} self._compress_level = min(9, max(0, compress_level if compress_level is", "6) ** 2 return timeit.default_timer() - self.last_check > interval def set_fail(self): self.status =", "timeout=None): timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm", "self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need", "no GZip compression will be used :param json_serializer: serializer to use for JSON", "max(0, compress_level if compress_level is not None else 0)) self._json_serializer = json_serializer self._max_flush_time", "def close(self): \"\"\" Cleans up resources and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False)", "max_buffer_size: Maximum size of buffer before flush :param kwargs: \"\"\" self.state = TransportState()", "= defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type:", "+= 1 self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check = None", "_stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport", "elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time since 
last", "encoding :param max_flush_time: Maximum time between flushes in seconds :param max_buffer_size: Maximum size", "import gzip import logging import threading import timeit from collections import defaultdict from", "\"flushing due to time since last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time,", "logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport = Transport def", "1 self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number", "timeit.default_timer() - self._last_flush queue_size = 0 if queued_data.fileobj is None else queued_data.fileobj.tell() if", "def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a", "def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name", "logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class", "message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace class Transport(object):", "to prepend to every queue :param compress_level: GZip compress level. 
If zero, no", "json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message,", "and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler", "failure \"\"\" message = str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\",", "class You must implement a send method.. \"\"\" async_mode = False def __init__(", "is not None else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size =", "- self._last_flush queue_size = 0 if queued_data.fileobj is None else queued_data.fileobj.tell() if flush:", "= None self.retry_number = -1 def should_try(self): if self.status == self.ONLINE: return True", "self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR = 0 def", "restarted at the end of the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer()", "prepend to every queue :param compress_level: GZip compress level. 
If zero, no GZip", "= self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() -", "data): \"\"\" You need to override this to do something with the actual", "transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj # get a reference to", "metadata if metadata is not None else {} self._compress_level = min(9, max(0, compress_level", ":param max_flush_time: Maximum time between flushes in seconds :param max_buffer_size: Maximum size of", "> self._max_flush_time: logger.debug( \"flushing due to time since last flush %.3fs > max_flush_time", "print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace class Transport(object): \"\"\" All", "subclass this class You must implement a send method.. \"\"\" async_mode = False", "not None else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size", "getbuffer, so we need to fall back to getvalue data = fileobj.getbuffer() if", "-1 def should_try(self): if self.status == self.ONLINE: return True interval = min(self.retry_number, 6)", "self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - self._last_flush", "= self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due to", "You need to override this to do something with the actual data. 
Usually", "async_mode = True sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs)", "min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check > interval def set_fail(self): self.status", "json_serializer: serializer to use for JSON encoding :param max_flush_time: Maximum time between flushes", "send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class", "self.status == self.ONLINE: return True interval = min(self.retry_number, 6) ** 2 return timeit.default_timer()", "TransportState() self._metadata = metadata if metadata is not None else {} self._compress_level =", "TransportState(object): ONLINE = 1 ERROR = 0 def __init__(self): self.status = self.ONLINE self.last_check", "closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called", "self.data = data self.print_trace = print_trace class Transport(object): \"\"\" All transport implementations need", "self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync:", "not self._worker or not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self, data=None):", "if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata})", "elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is None:", "True interval = min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check > interval", "*args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None 
@property def worker(self): if not", "+ \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - self._last_flush queue_size = 0", "synchronously in the current thread :param start_flush_timer: set to True if the flush", "if queued_data.fileobj is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time", "elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True):", "self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue", "-*- coding: utf-8 -*- import gzip import logging import threading import timeit from", ":param json_serializer: serializer to use for JSON encoding :param max_flush_time: Maximum time between", "server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up resources and closes connection", "to the fileobj before closing the gzip file queued_data.close() # StringIO on Python", "class TransportState(object): ONLINE = 1 ERROR = 0 def __init__(self): self.status = self.ONLINE", "flushed data due to transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj #", "__init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property def worker(self): if", "import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException,", "on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called", "compress_level if compress_level is not None else 0)) self._json_serializer = json_serializer self._max_flush_time =", "flushes in seconds :param max_buffer_size: Maximum size of 
buffer before flush :param kwargs:", "since queue size %d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush()", "= None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts =", "self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def", "timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush", "= min(9, max(0, compress_level if compress_level is not None else 0)) self._json_serializer =", "__init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new", "flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer:", "transport implementations need to subclass this class You must implement a send method..", "Transport instance :param metadata: Metadata object to prepend to every queue :param compress_level:", ":return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data", "class Transport(object): \"\"\" All transport implementations need to subclass this class You must", "use for JSON encoding :param max_flush_time: Maximum time between flushes in seconds :param", "seconds :param max_buffer_size: Maximum size of buffer before flush :param kwargs: \"\"\" self.state", "logger.debug( \"flushing since queue size %d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size", "\"flushing since queue size %d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size )", "set to True if the flush timer thread should be restarted at the", "self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - 
self._last_flush queue_size = 0 if queued_data.fileobj", "does not have getbuffer, so we need to fall back to getvalue data", "= print_trace class Transport(object): \"\"\" All transport implementations need to subclass this class", "{\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE =", "self._last_flush queue_size = 0 if queued_data.fileobj is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced", "self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size %d", "worker(self): if not self._worker or not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def", "hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try:", "logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None):", "self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start()", "= 0 if queued_data.fileobj is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush()", "str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self,", "handler called by the transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None,", "try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync,", "since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time since last flush %.3fs 
>", "flush timer thread should be restarted at the end of the flush :return:", "e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You", "or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon =", "AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as", "%r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or", "metadata is not None else {} self._compress_level = min(9, max(0, compress_level if compress_level", "from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception):", "queue_size = 0 if queued_data.fileobj is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\")", "self.ONLINE self.last_check = None self.retry_number = -1 def did_fail(self): return self.status == self.ERROR", "logger.error(\"dropping flushed data due to transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj", "= False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\"", "set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def did_fail(self): return", "self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check =", "self._last_flush = timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def queue(self, event_type, data,", ":param metadata: Metadata object to prepend to every queue :param compress_level: 
GZip compress", "ERROR = 0 def __init__(self): self.status = self.ONLINE self.last_check = None self.retry_number =", "before flush :param kwargs: \"\"\" self.state = TransportState() self._metadata = metadata if metadata", "else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data =", "= 0 def __init__(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1", "self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR = 0 def __init__(self): self.status =", "\"\"\" self.state = TransportState() self._metadata = metadata if metadata is not None else", "{} self._compress_level = min(9, max(0, compress_level if compress_level is not None else 0))", "**kwargs): \"\"\" Failure handler called by the transport on send failure \"\"\" message", "defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO", "= timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need to override", "and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes > max_queue_size", "AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class", "a server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up resources and closes", "self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) +", "self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\",", "Maximum size of buffer before flush :param kwargs: \"\"\" self.state = TransportState() 
self._metadata", "Transport(object): \"\"\" All transport implementations need to subclass this class You must implement", "fileobj = queued_data.fileobj # get a reference to the fileobj before closing the", "super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property def worker(self): if not self._worker or", "flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1", "def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def", "timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling", "max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer()", "be restarted at the end of the flush :return: None \"\"\" with self._queue_lock:", "send failure \"\"\" message = str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception,", "= fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync:", "on Python 2 does not have getbuffer, so we need to fall back", "\"\"\" All transport implementations need to subclass this class You must implement a", "import json_encoder from elasticapm.utils.compat import BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self,", "\"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up resources and closes connection :return:", "is sending to a server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up", "= True logger.debug(\"Starting flush timer\") self._flush_timer.start() 
def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\")", "data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data})", "Failure handler called by the transport on send failure \"\"\" message = str(exception)", "= str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def", "handler called by the transport on send failure \"\"\" message = str(exception) logger.error(\"Failed", "self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE", "self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush)", "with the actual data. 
Usually - this is sending to a server \"\"\"", "self._counts = defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data", "= threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush", "flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport):", ") self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size", "self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called by the transport on", "self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode =", "- this is sending to a server \"\"\" raise NotImplementedError def close(self): \"\"\"", "at the end of the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data,", "timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need to override this", "if metadata is not None else {} self._compress_level = min(9, max(0, compress_level if", "else: try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if", "self.ONLINE self.last_check = None self.retry_number = -1 def should_try(self): if self.status == self.ONLINE:", "> max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock:", "if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data) else:", "if not self._worker or not 
self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self,", "a reference to the fileobj before closing the gzip file queued_data.close() # StringIO", "thread :param start_flush_timer: set to True if the flush timer thread should be", "self.status = self.ONLINE self.last_check = None self.retry_number = -1 def should_try(self): if self.status", "**kwargs) self._worker = None @property def worker(self): if not self._worker or not self._worker.is_alive():", "every queue :param compress_level: GZip compress level. If zero, no GZip compression will", "not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data)", "object to prepend to every queue :param compress_level: GZip compress level. If zero,", "You must implement a send method.. \"\"\" async_mode = False def __init__( self,", "self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time since last flush", "= max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer =", "self.state.should_try(): logger.error(\"dropping flushed data due to transport failure back-off\") elif queued_data: fileobj =", "= timeit.default_timer() - self._last_flush queue_size = 0 if queued_data.fileobj is None else queued_data.fileobj.tell()", "None else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data", "queue_size > self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes > max_queue_size %d", "None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int)", "between flushes in seconds :param max_buffer_size: Maximum size of buffer before flush :param", "__init__(self, message, data=None, print_trace=True): 
super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace class", "\"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by the transport", "self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number =", "a send method.. \"\"\" async_mode = False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps,", "self.last_check > interval def set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check =", "implementations need to subclass this class You must implement a send method.. \"\"\"", "compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport instance :param", "self._worker = None @property def worker(self): if not self._worker or not self._worker.is_alive(): self._worker", "instance :param metadata: Metadata object to prepend to every queue :param compress_level: GZip", "queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due to transport failure back-off\") elif", "queued_data: fileobj = queued_data.fileobj # get a reference to the fileobj before closing", "end of the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data =", "self._worker or not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker def send_sync(self, data=None): try:", "BytesIO logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message)", "file queued_data.close() # StringIO on Python 2 does not have getbuffer, so we", "max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport instance :param metadata: Metadata object", "start_flush_timer: set to 
True if the flush timer thread should be restarted at", "e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if", "2 does not have getbuffer, so we need to fall back to getvalue", "logging import threading import timeit from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker", "self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check", "= None self._counts = defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data", "= metadata if metadata is not None else {} self._compress_level = min(9, max(0,", "to fall back to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue()", "None @property def worker(self): if not self._worker or not self._worker.is_alive(): self._worker = AsyncWorker()", "self.handle_transport_success() except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def", "\"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try: self.send(data)", "exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer", "start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need to override this to do", "= (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\"", "the flush timer thread should be restarted at the end of the flush", "# -*- coding: utf-8 -*- import gzip import logging import threading import timeit", ":param 
start_flush_timer: set to True if the flush timer thread should be restarted", "flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due", "= timeit.default_timer() def set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1", "\"\"\" async_mode = False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs", "queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - self._last_flush queue_size", "not None else {} self._compress_level = min(9, max(0, compress_level if compress_level is not", "json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport instance :param metadata:", "by the transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\"", "interval def set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def", "self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR = 0 def __init__(self): self.status", "self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport = Transport", "flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if", "self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush =", "compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") 
self._queued_data.write(data) return self._queued_data def flush(self, sync=False,", "thread should be restarted at the end of the flush :return: None \"\"\"", "NotImplementedError def close(self): \"\"\" Cleans up resources and closes connection :return: \"\"\" self.flush(sync=True,", "metadata: Metadata object to prepend to every queue :param compress_level: GZip compress level.", "Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport,", "queued_data, self._queued_data = self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data", "threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\")", "start_flush_timer=True): \"\"\" Flush the queue :param sync: if true, flushes the queue synchronously", "the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None", "None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and since_last_flush >", "self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create a new Transport", "All transport implementations need to subclass this class You must implement a send", "-*- import gzip import logging import threading import timeit from collections import defaultdict", "\"\"\" Flush the queue :param sync: if true, flushes the queue synchronously in", "= \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self):", "mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") 
self._queued_data.write(data) return self._queued_data def flush(self,", "1 since_last_flush = timeit.default_timer() - self._last_flush queue_size = 0 if queued_data.fileobj is None", "be used :param json_serializer: serializer to use for JSON encoding :param max_flush_time: Maximum", "logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data", "data due to transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj # get", "- self.last_check > interval def set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check", "queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer()", "as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\"", "to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\")", "self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed", "the end of the flush :return: None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data", "of buffer before flush :param kwargs: \"\"\" self.state = TransportState() self._metadata = metadata", "self.status = self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self): self.status =", ":return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called by the", "message = str(exception) logger.error(\"Failed to submit message: %r\", message, exc_info=getattr(exception, \"print_trace\", 
True)) self.state.set_fail()", "try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer:", "self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is", "self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting", "since last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size", "StringIO on Python 2 does not have getbuffer, so we need to fall", "** 2 return timeit.default_timer() - self.last_check > interval def set_fail(self): self.status = self.ERROR", "getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and", "closing the gzip file queued_data.close() # StringIO on Python 2 does not have", "fileobj before closing the gzip file queued_data.close() # StringIO on Python 2 does", "event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type]", "= AsyncWorker() return self._worker def send_sync(self, data=None): try: self.sync_transport.send(self, data) self.handle_transport_success() except Exception", "max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer()", "= timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\"", "for JSON encoding :param max_flush_time: Maximum time between flushes in seconds :param max_buffer_size:", "Python 2 does not have getbuffer, so we need to fall back to", "not have 
getbuffer, so we need to fall back to getvalue data =", "self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due to transport", "hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as", "def send(self, data): \"\"\" You need to override this to do something with", "_start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name =", "= data self.print_trace = print_trace class Transport(object): \"\"\" All transport implementations need to", "super(TransportException, self).__init__(message) self.data = data self.print_trace = print_trace class Transport(object): \"\"\" All transport", "+= 1 since_last_flush = timeit.default_timer() - self._last_flush queue_size = 0 if queued_data.fileobj is", "back-off\") elif queued_data: fileobj = queued_data.fileobj # get a reference to the fileobj", "%d bytes > max_queue_size %d bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer:", "the current thread :param start_flush_timer: set to True if the flush timer thread", "level. 
If zero, no GZip compression will be used :param json_serializer: serializer to", "self.print_trace = print_trace class Transport(object): \"\"\" All transport implementations need to subclass this", "timeout or self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon", "ONLINE = 1 ERROR = 0 def __init__(self): self.status = self.ONLINE self.last_check =", "called by the transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs):", "send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by the", "= self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self): self.status = self.ONLINE", "queue :param compress_level: GZip compress level. If zero, no GZip compression will be", "flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync: if true, flushes the", "zero, no GZip compression will be used :param json_serializer: serializer to use for", "queued_data.fileobj # get a reference to the fileobj before closing the gzip file", "> max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size:", "class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace", "do something with the actual data. 
Usually - this is sending to a", "0 def __init__(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def", "since_last_flush = timeit.default_timer() - self._last_flush queue_size = 0 if queued_data.fileobj is None else", "%.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing", "max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug(", "send(self, data): \"\"\" You need to override this to do something with the", "True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") self._flush_timer.cancel()", "TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data = data self.print_trace =", "__init__(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def should_try(self): if", "time since last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif", "sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None", "buffer before flush :param kwargs: \"\"\" self.state = TransportState() self._metadata = metadata if", "last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and", "threading import timeit from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils", "queue(self, event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\"))", "= 1 ERROR = 0 def __init__(self): 
self.status = self.ONLINE self.last_check = None", "None self.retry_number = -1 def should_try(self): if self.status == self.ONLINE: return True interval", "**kwargs ): \"\"\" Create a new Transport instance :param metadata: Metadata object to", "self._max_flush_time: logger.debug( \"flushing due to time since last flush %.3fs > max_flush_time %.3fs\",", "AsyncTransport(Transport): async_mode = True sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args,", "if the flush timer thread should be restarted at the end of the", "= self.ONLINE self.last_check = None self.retry_number = -1 def should_try(self): if self.status ==", "JSON encoding :param max_flush_time: Maximum time between flushes in seconds :param max_buffer_size: Maximum", "self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def", "Maximum time between flushes in seconds :param max_buffer_size: Maximum size of buffer before", "gzip import logging import threading import timeit from collections import defaultdict from elasticapm.contrib.async_worker", "\"elasticapm flush timer\" self._flush_timer.daemon = True logger.debug(\"Starting flush timer\") self._flush_timer.start() def _stop_flush_timer(self): if", "logger = logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data", "self._queued_data = None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None self._counts", "queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\":", "the queue :param sync: if true, flushes the queue synchronously in the current", "used :param json_serializer: 
serializer to use for JSON encoding :param max_flush_time: Maximum time", "we need to fall back to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\")", "interval = min(self.retry_number, 6) ** 2 return timeit.default_timer() - self.last_check > interval def", "@property def queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data", "elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import BytesIO logger =", "Metadata object to prepend to every queue :param compress_level: GZip compress level. If", "with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush", "\"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called by the transport", "None \"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data and", "coding: utf-8 -*- import gzip import logging import threading import timeit from collections", "\"\"\" with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data and not", "False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None, max_buffer_size=None, **kwargs ): \"\"\" Create", "import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat import", "utf-8 -*- import gzip import logging import threading import timeit from collections import", "self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data is None: self._queued_data =", 
"self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping", "timer\") self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport = Transport def __init__(self, *args,", "max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer = None", "except Exception as e: self.handle_transport_fail(exception=e) def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self):", "self.status = self.ONLINE self.last_check = None self.retry_number = -1 def did_fail(self): return self.status", "# get a reference to the fileobj before closing the gzip file queued_data.close()", "send method.. \"\"\" async_mode = False def __init__( self, metadata=None, compress_level=5, json_serializer=json_encoder.dumps, max_flush_time=None,", "the queue synchronously in the current thread :param start_flush_timer: set to True if", "due to transport failure back-off\") elif queued_data: fileobj = queued_data.fileobj # get a", "message, exc_info=getattr(exception, \"print_trace\", True)) self.state.set_fail() def _start_flush_timer(self, timeout=None): timeout = timeout or self._max_flush_time", ":param max_buffer_size: Maximum size of buffer before flush :param kwargs: \"\"\" self.state =", "should be restarted at the end of the flush :return: None \"\"\" with", "self._flush_timer.cancel() class AsyncTransport(Transport): async_mode = True sync_transport = Transport def __init__(self, *args, **kwargs):", "the transport on send failure \"\"\" message = str(exception) logger.error(\"Failed to submit message:", "on send failure \"\"\" message = str(exception) logger.error(\"Failed to submit message: %r\", message,", "timer\") self._flush_timer.start() def _stop_flush_timer(self): if self._flush_timer: logger.debug(\"Cancelling flush timer\") 
self._flush_timer.cancel() class AsyncTransport(Transport): async_mode", "self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer()", "to True if the flush timer thread should be restarted at the end", "> self._max_buffer_size: logger.debug( \"flushing since queue size %d bytes > max_queue_size %d bytes\",", "= logging.getLogger(\"elasticapm.transport\") class TransportException(Exception): def __init__(self, message, data=None, print_trace=True): super(TransportException, self).__init__(message) self.data =", "\"\"\" Failure handler called by the transport on send failure \"\"\" message =", "self._queued_data = self._queued_data, None if queued_data and not self.state.should_try(): logger.error(\"dropping flushed data due", "must implement a send method.. \"\"\" async_mode = False def __init__( self, metadata=None,", "None else {} self._compress_level = min(9, max(0, compress_level if compress_level is not None", "= timeit.default_timer() self._flush_timer = None self._counts = defaultdict(int) def queue(self, event_type, data, flush=False):", "timer thread should be restarted at the end of the flush :return: None", "with self._queue_lock: self._stop_flush_timer() queued_data, self._queued_data = self._queued_data, None if queued_data and not self.state.should_try():", "self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the", "def set_success(self): self.status = self.ONLINE self.last_check = None self.retry_number = -1 def did_fail(self):", "if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need to override this to", "self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data): \"\"\" You need to", "to override this to do something with the actual data. 
Usually - this", "compress_level: GZip compress level. If zero, no GZip compression will be used :param", "this to do something with the actual data. Usually - this is sending", "gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def", "sending to a server \"\"\" raise NotImplementedError def close(self): \"\"\" Cleans up resources", "to subclass this class You must implement a send method.. \"\"\" async_mode =", "queued_data.fileobj is None else queued_data.fileobj.tell() if flush: logger.debug(\"forced flush\") self.flush() elif self._max_flush_time and", "data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True):", "implement a send method.. \"\"\" async_mode = False def __init__( self, metadata=None, compress_level=5,", "Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self, data):", "is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\")", "logger.debug( \"flushing due to time since last flush %.3fs > max_flush_time %.3fs\", since_last_flush,", "compress_level is not None else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size", "current thread :param start_flush_timer: set to True if the flush timer thread should", "need to subclass this class You must implement a send method.. 
\"\"\" async_mode", "kwargs: \"\"\" self.state = TransportState() self._metadata = metadata if metadata is not None", "to time since last flush %.3fs > max_flush_time %.3fs\", since_last_flush, self._max_flush_time, ) self.flush()", "data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) + \"\\n\").encode(\"utf-8\")) self._counts[event_type] +=", ") self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if self._queued_data", "> interval def set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer()", "by the transport on send failure \"\"\" message = str(exception) logger.error(\"Failed to submit", "= queued_data.fileobj # get a reference to the fileobj before closing the gzip", "= -1 def should_try(self): if self.status == self.ONLINE: return True interval = min(self.retry_number,", "self._start_flush_timer() @property def queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level)", "def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data}) +", "bytes\", queue_size, self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def", "since_last_flush, self._max_flush_time, ) self.flush() elif self._max_buffer_size and queue_size > self._max_buffer_size: logger.debug( \"flushing since", "Cleans up resources and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs):", "override this to do something with the actual data. 
Usually - this is", "need to fall back to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else", "else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success()", "defaultdict(int) def queue(self, event_type, data, flush=False): with self._queue_lock: queued_data = self.queued_data queued_data.write((self._json_serializer({event_type: data})", "except Exception as e: self.handle_transport_fail(e) self._last_flush = timeit.default_timer() if start_flush_timer: self._start_flush_timer() def send(self,", "up resources and closes connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\"", "def queued_data(self): if self._queued_data is None: self._queued_data = gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data =", "the gzip file queued_data.close() # StringIO on Python 2 does not have getbuffer,", "the fileobj before closing the gzip file queued_data.close() # StringIO on Python 2", "\"\"\" You need to override this to do something with the actual data.", "to every queue :param compress_level: GZip compress level. If zero, no GZip compression", "self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush = timeit.default_timer() self._flush_timer", "data. 
Usually - this is sending to a server \"\"\" raise NotImplementedError def", "self.flush() elif self._max_flush_time and since_last_flush > self._max_flush_time: logger.debug( \"flushing due to time since", "have getbuffer, so we need to fall back to getvalue data = fileobj.getbuffer()", "sync: if true, flushes the queue synchronously in the current thread :param start_flush_timer:", "to use for JSON encoding :param max_flush_time: Maximum time between flushes in seconds", "\"send_async\") and not sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as e:", "def set_fail(self): self.status = self.ERROR self.retry_number += 1 self.last_check = timeit.default_timer() def set_success(self):", "import threading import timeit from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from", "GZip compression will be used :param json_serializer: serializer to use for JSON encoding", "return timeit.default_timer() - self.last_check > interval def set_fail(self): self.status = self.ERROR self.retry_number +=", "data self.print_trace = print_trace class Transport(object): \"\"\" All transport implementations need to subclass", "connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called by", "start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success handler called by the transport on successful", "flushes the queue synchronously in the current thread :param start_flush_timer: set to True", "# StringIO on Python 2 does not have getbuffer, so we need to", "back to getvalue data = fileobj.getbuffer() if hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self,", "= gzip.GzipFile(fileobj=BytesIO(), mode=\"w\", compresslevel=self._compress_level) data = (self._json_serializer({\"metadata\": self._metadata}) + \"\\n\").encode(\"utf-8\") self._queued_data.write(data) 
return self._queued_data", "True if the flush timer thread should be restarted at the end of", "self._max_flush_time self._flush_timer = threading.Timer(timeout, self.flush) self._flush_timer.name = \"elasticapm flush timer\" self._flush_timer.daemon = True", "timeit from collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder", "def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler called by the transport on send", "data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object):", "\"\\n\").encode(\"utf-8\") self._queued_data.write(data) return self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param", "actual data. Usually - this is sending to a server \"\"\" raise NotImplementedError", "timeit.default_timer() - self.last_check > interval def set_fail(self): self.status = self.ERROR self.retry_number += 1", "self._queued_data def flush(self, sync=False, start_flush_timer=True): \"\"\" Flush the queue :param sync: if true,", "self._start_flush_timer() def send(self, data): \"\"\" You need to override this to do something", "def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated() class TransportState(object): ONLINE = 1 ERROR", "If zero, no GZip compression will be used :param json_serializer: serializer to use", "Flush the queue :param sync: if true, flushes the queue synchronously in the", "== self.ONLINE: return True interval = min(self.retry_number, 6) ** 2 return timeit.default_timer() -", "self.last_check = None self.retry_number = -1 def should_try(self): if self.status == self.ONLINE: return", "@property def worker(self): if not self._worker or not self._worker.is_alive(): self._worker = AsyncWorker() return", "resources and closes 
connection :return: \"\"\" self.flush(sync=True, start_flush_timer=False) def handle_transport_success(self, **kwargs): \"\"\" Success", ":param sync: if true, flushes the queue synchronously in the current thread :param", "self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock", "def send_async(self, data): self.worker.queue(self.send_sync, {\"data\": data}) def close(self): super(AsyncTransport, self).close() if self._worker: self._worker.main_thread_terminated()", "collections import defaultdict from elasticapm.contrib.async_worker import AsyncWorker from elasticapm.utils import json_encoder from elasticapm.utils.compat", "def worker(self): if not self._worker or not self._worker.is_alive(): self._worker = AsyncWorker() return self._worker", "0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None", "will be used :param json_serializer: serializer to use for JSON encoding :param max_flush_time:", "\"\\n\").encode(\"utf-8\")) self._counts[event_type] += 1 since_last_flush = timeit.default_timer() - self._last_flush queue_size = 0 if", "= TransportState() self._metadata = metadata if metadata is not None else {} self._compress_level", "<reponame>shareablee/apm-agent-python # -*- coding: utf-8 -*- import gzip import logging import threading import", "print_trace class Transport(object): \"\"\" All transport implementations need to subclass this class You", "exception=None, **kwargs): \"\"\" Failure handler called by the transport on send failure \"\"\"", "True sync_transport = Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker =", "self._max_buffer_size ) self.flush() elif not self._flush_timer: with self._queue_lock: self._start_flush_timer() @property def queued_data(self): if", "fileobj.getbuffer() if 
hasattr(fileobj, \"getbuffer\") else fileobj.getvalue() if hasattr(self, \"send_async\") and not sync: self.send_async(data)", "sync: self.send_async(data) else: try: self.send(data) self.handle_transport_success() except Exception as e: self.handle_transport_fail(e) self._last_flush =", "flush :param kwargs: \"\"\" self.state = TransportState() self._metadata = metadata if metadata is", "2 return timeit.default_timer() - self.last_check > interval def set_fail(self): self.status = self.ERROR self.retry_number", "if compress_level is not None else 0)) self._json_serializer = json_serializer self._max_flush_time = max_flush_time", "raise NotImplementedError def close(self): \"\"\" Cleans up resources and closes connection :return: \"\"\"", "self._max_flush_time = max_flush_time self._max_buffer_size = max_buffer_size self._queued_data = None self._queue_lock = threading.Lock() self._last_flush", "not self.state.should_try(): logger.error(\"dropping flushed data due to transport failure back-off\") elif queued_data: fileobj", "Transport def __init__(self, *args, **kwargs): super(AsyncTransport, self).__init__(*args, **kwargs) self._worker = None @property def", "transport on successful send \"\"\" self.state.set_success() def handle_transport_fail(self, exception=None, **kwargs): \"\"\" Failure handler" ]
[ "def distinct_in_window(arr, win_sz) -> list: result = [] curr_dict = dict() for i", "elements in each k sized window in this array\"\"\" def distinct_in_window(arr, win_sz) ->", "Count Distinct Elements in Each Window: We are given an array and a", "in this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result = [] curr_dict =", "= dict() for i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1", "v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result def main(): arr_input", "-> list: result = [] curr_dict = dict() for i in range(win_sz): if", "1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m", "curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m += 1 curr_dict = {k:", "= 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in", "def main(): arr_input = [10, 20, 20, 10, 30, 40, 10] k =", "list: result = [] curr_dict = dict() for i in range(win_sz): if arr[i]", "sized window in this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result = []", "in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1", "30, 40, 10] k = 4 print(distinct_in_window(arr_input, k)) # Using the special variable", "result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if", "curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] =", "m += 1 curr_dict = {k: v for k, v in curr_dict.items() if", "m = 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i]", "window in this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result = [] curr_dict", "(k<=n). 
We need to find distinct elements in each k sized window in", "v for k, v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result", "for i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]]", "arr_input = [10, 20, 20, 10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input,", "distinct elements in each k sized window in this array\"\"\" def distinct_in_window(arr, win_sz)", "1 m += 1 curr_dict = {k: v for k, v in curr_dict.items()", "arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m += 1", "i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] +=", "curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]]", "k)) # Using the special variable # __name__ if __name__ == \"__main__\": main()", "Window: We are given an array and a number k (k<=n). We need", "array\"\"\" def distinct_in_window(arr, win_sz) -> list: result = [] curr_dict = dict() for", "k (k<=n). We need to find distinct elements in each k sized window", "v != 0} result.append(len(curr_dict.keys())) return result def main(): arr_input = [10, 20, 20,", "in each k sized window in this array\"\"\" def distinct_in_window(arr, win_sz) -> list:", "Each Window: We are given an array and a number k (k<=n). 
We", "range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys()))", "k, v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result def main():", "distinct_in_window(arr, win_sz) -> list: result = [] curr_dict = dict() for i in", "{k: v for k, v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return", "result = [] curr_dict = dict() for i in range(win_sz): if arr[i] in", "= [10, 20, 20, 10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input, k))", "= [] curr_dict = dict() for i in range(win_sz): if arr[i] in curr_dict.keys():", "[] curr_dict = dict() for i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]]", "Distinct Elements in Each Window: We are given an array and a number", "+= 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0 for i in", "number k (k<=n). We need to find distinct elements in each k sized", "an array and a number k (k<=n). We need to find distinct elements", "1 result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1", "in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0", "4 print(distinct_in_window(arr_input, k)) # Using the special variable # __name__ if __name__ ==", "each k sized window in this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result", "else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz, len(arr)):", "= {k: v for k, v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys()))", "\"\"\" Count Distinct Elements in Each Window: We are given an array and", "a number k (k<=n). 
We need to find distinct elements in each k", "return result def main(): arr_input = [10, 20, 20, 10, 30, 40, 10]", "curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m += 1 curr_dict =", "+= 1 curr_dict = {k: v for k, v in curr_dict.items() if v", "We need to find distinct elements in each k sized window in this", "0} result.append(len(curr_dict.keys())) return result def main(): arr_input = [10, 20, 20, 10, 30,", "We are given an array and a number k (k<=n). We need to", "arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m =", "given an array and a number k (k<=n). We need to find distinct", "in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result def main(): arr_input =", "40, 10] k = 4 print(distinct_in_window(arr_input, k)) # Using the special variable #", "print(distinct_in_window(arr_input, k)) # Using the special variable # __name__ if __name__ == \"__main__\":", "!= 0} result.append(len(curr_dict.keys())) return result def main(): arr_input = [10, 20, 20, 10,", "k sized window in this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result =", "if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m +=", "in Each Window: We are given an array and a number k (k<=n).", "dict() for i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else:", "range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else:", "10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input, k)) # Using the special", "= 1 result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -=", "[10, 20, 20, 10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input, k)) #", "+= 1 else: curr_dict[arr[i]] = 1 m += 1 curr_dict = {k: v", "curr_dict = {k: v for k, v in curr_dict.items() if v != 0}", "curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 
result.append(len(curr_dict.keys())) m = 0 for i", "else: curr_dict[arr[i]] = 1 m += 1 curr_dict = {k: v for k,", "i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] =", "0 for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys():", "this array\"\"\" def distinct_in_window(arr, win_sz) -> list: result = [] curr_dict = dict()", "need to find distinct elements in each k sized window in this array\"\"\"", "win_sz) -> list: result = [] curr_dict = dict() for i in range(win_sz):", "and a number k (k<=n). We need to find distinct elements in each", "-= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1", "to find distinct elements in each k sized window in this array\"\"\" def", "curr_dict[arr[i]] = 1 m += 1 curr_dict = {k: v for k, v", "1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0 for i in range(win_sz,", "20, 10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input, k)) # Using the", "= 4 print(distinct_in_window(arr_input, k)) # Using the special variable # __name__ if __name__", "for k, v in curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result def", "curr_dict.items() if v != 0} result.append(len(curr_dict.keys())) return result def main(): arr_input = [10,", "1 else: curr_dict[arr[i]] = 1 m += 1 curr_dict = {k: v for", "len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]]", "if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m", "if v != 0} result.append(len(curr_dict.keys())) return result def main(): arr_input = [10, 20,", "main(): arr_input = [10, 20, 20, 10, 30, 40, 10] k = 4", "20, 20, 10, 30, 40, 10] k = 4 print(distinct_in_window(arr_input, k)) # Using", "k = 4 print(distinct_in_window(arr_input, k)) # Using the special variable # __name__ if", "are given an array and a 
number k (k<=n). We need to find", "result.append(len(curr_dict.keys())) return result def main(): arr_input = [10, 20, 20, 10, 30, 40,", "in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 m += 1 curr_dict", "curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1 result.append(len(curr_dict.keys())) m = 0 for", "10] k = 4 print(distinct_in_window(arr_input, k)) # Using the special variable # __name__", "1 curr_dict = {k: v for k, v in curr_dict.items() if v !=", "for i in range(win_sz, len(arr)): curr_dict[arr[m]] -= 1 if arr[i] in curr_dict.keys(): curr_dict[arr[i]]", "in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] += 1 else: curr_dict[arr[i]] = 1", "= 1 m += 1 curr_dict = {k: v for k, v in", "curr_dict = dict() for i in range(win_sz): if arr[i] in curr_dict.keys(): curr_dict[arr[i]] +=", "array and a number k (k<=n). We need to find distinct elements in", "find distinct elements in each k sized window in this array\"\"\" def distinct_in_window(arr,", "Elements in Each Window: We are given an array and a number k", "result def main(): arr_input = [10, 20, 20, 10, 30, 40, 10] k" ]
[ "filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data']", "= pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0) #", "#u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0,", "\"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init,", "from matplotlib import pyplot as plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\")", "by putting convergence at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step", "w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]', fontsize=20)", "np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams())", "# Test mass continuity by putting convergence at surface and divergence aloft berr_grid.fields['DT']['data']", "= pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0,", "cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], 
cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X", "import pyart import pydda from matplotlib import pyplot as plt import numpy as", "plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density],", "= cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density],", "matplotlib import pyplot as plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid", "v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT')", "= plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]', fontsize=20) plt.ylabel('Z [m]',", "pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init", "= sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid,", "vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # 
cpol_grid, 30, 9.0, 15e3, 20e3, 5,", "cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2),", "pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3,", "data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT',", "plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs)", "aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do iterations with just data", "sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid,", "10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]')", "lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], 
cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs", "at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do", "berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do iterations with just data Grids", "w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init,", "plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10))", "continuity by putting convergence at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] #", "pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height", "wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init =", "v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid,", "vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False)", "9.0, 15e3, 20e3, 5, 0, -20e3, 0) # Test mass continuity 
by putting", "import pyplot as plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid =", "w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT',", "cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density])", "[dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5)", "# cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0) # Test mass", "vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow)", "putting convergence at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1", "'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::],", "Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, 
w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT',", "cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False)", "cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0) # Test mass continuity", "plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs =", "-20e3, 0) # Test mass continuity by putting convergence at surface and divergence", "mass continuity by putting convergence at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data']", "z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init", "sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init,", "pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind", "# Step 1 - do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid],", "pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") 
print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind", "sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3,", "cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::],", "do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back,", "0) # Test mass continuity by putting convergence at surface and divergence aloft", "cpol_grid.fields['DT']['data'] # Step 1 - do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid,", "4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z", "= sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init,", "- do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back,", "print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init,", "w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0)", "v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, 
mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6,", "u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( #", "cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]', fontsize=20) plt.ylabel('Z", "u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]',", "plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16,", "as plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding", "= pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back =", "sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back", "as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = 
pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection)", "#u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding,", "= pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init,", "just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0,", "u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8))", "= pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back =", "Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4,", "15e3, 20e3, 5, 0, -20e3, 0) # Test mass continuity by putting convergence", "= cpol_grid.fields['DT']['data'] # Step 1 - do iterations with just data Grids =", "0, -20e3, 0) # Test mass continuity by putting convergence at surface and", "v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3,", "vel_field='VT') u_init, v_init, w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field(", 
"berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back", "numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\")", "30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0) # Test mass continuity by", "cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]', fontsize=20) plt.ylabel('Z [m]', fontsize=20) plt.show()", "pyplot as plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\")", "1 - do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init,", "plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::],", "cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density],", "pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) 
plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::],", "v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0), vel_field='VT')", "5, 0, -20e3, 0) # Test mass continuity by putting convergence at surface", "cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back", "frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z =", "Test mass continuity by putting convergence at surface and divergence aloft berr_grid.fields['DT']['data'] =", "cpol_z[::,lat_level,::], cmap=pyart.graph.cm_colorblind.HomeyerRainbow) plt.colorbar(label='Z [dBZ]') plt.barbs(cpol_x[::barb_density_vert,lat_level,::barb_density], cpol_h[::barb_density_vert,lat_level,::barb_density], u['data'][::barb_density_vert,lat_level,::barb_density], w['data'][::barb_density_vert,lat_level,::barb_density]) cs = plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::],", "= pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde( \"/home/rjackson/data/soundings/twpsondewnpnC3.b1.20060119.231600.custom.cdf\") print(berr_grid.projection) print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back =", "print(cpol_grid.get_projparams()) u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init", "20e3, 5, 0, -20e3, 0) # Test mass continuity by putting convergence at", "= sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, 
wind=(0.0,0.0,0.0), vel_field='VT') u_init, v_init, w_init =", "w_init = pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30,", "plt.contour(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], w['data'][::,lat_level,::], levels=np.arange(1,20,2), linewidth=16, alpha=0.5) plt.clabel(cs) plt.xlabel('X [km]', fontsize=20) plt.ylabel('Z [m]', fontsize=20)", "Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10])", "mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45", "convergence at surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 -", "pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0, 15e3, 20e3, 5, 0, -20e3, 0) # Test", "z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1,", "refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids, 'DT', level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z", "iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back,", "pyart import pydda from matplotlib import pyplot as plt import numpy as np", "sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init = pydda.retrieval.make_constant_wind_field(cpol_grid, wind=(0.0,0.0,0.0),", "with just data 
Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0,", "plt import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding =", "Step 1 - do iterations with just data Grids = pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init,", "v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0, mask_w_outside_opt=False) plt.figure(figsize=(8,8)) pydda.vis.plot_horiz_xsection_barbs(Grids,", "u_back = sounding[1].u_wind v_back = sounding[1].v_wind z_back = sounding[1].height #u_init, v_init, w_init =", "and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do iterations with", "= pydda.retrieval.make_wind_field_from_profile(cpol_grid, sounding, vel_field='VT') #u_init, v_init, w_init = pydda.retrieval.make_test_divergence_field( # cpol_grid, 30, 9.0,", "level=6, vel_contours=[1, 4, 10]) plt.interactive(False) cpol_z = cpol_grid.fields['DT']['data'] lat_level=45 plt.figure(figsize=(10,10)) plt.pcolormesh(cpol_x[::,lat_level,::], cpol_h[::,lat_level,::], cpol_z[::,lat_level,::],", "import numpy as np berr_grid = pyart.io.read_grid(\"berr_Darwin_hires.nc\") cpol_grid = pyart.io.read_grid(\"cpol_Darwin_hires.nc\") sounding = pyart.io.read_arm_sonde(", "pydda.retrieval.get_dd_wind_field([berr_grid, cpol_grid], u_init, v_init, w_init,u_back=u_back, v_back=v_back, z_back=z_back, Co=100.0, Cm=1500.0, vel_name='VT', refl_field='DT', frz=5000.0, filt_iterations=0,", "surface and divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do iterations", "import pydda from matplotlib import pyplot as plt import numpy as np berr_grid", "divergence aloft berr_grid.fields['DT']['data'] = cpol_grid.fields['DT']['data'] # Step 1 - do iterations with 
just", "pydda from matplotlib import pyplot as plt import numpy as np berr_grid =" ]
[ "d = dd(int) on = True for m in get_toks(): m = int(m)", "it.__next__ get_toks, get_tok = make_get_toks() from collections import defaultdict as dd n, k", "functions out of iterable of split strings\" from sys import stdin from itertools", "make_get_toks(f=None): \"make iterator and next functions out of iterable of split strings\" from", "def the_it(): \"so that both results are callable in similar manner\" return it", "True for m in get_toks(): m = int(m) if m % n ==", "chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok = make_get_toks() from collections import defaultdict", "both results are callable in similar manner\" return it if f is None:", "manner\" return it if f is None: f = stdin it = chain.from_iterable(map(sp,", "= int(m) if m % n == 0: d[m] += 1 if d[m]", "strings\" from sys import stdin from itertools import chain def sp(ln): \"to split", "\"so that both results are callable in similar manner\" return it if f", "def make_get_toks(f=None): \"make iterator and next functions out of iterable of split strings\"", "\"make iterator and next functions out of iterable of split strings\" from sys", "with a map\" return ln.split() def the_it(): \"so that both results are callable", "n == 0: d[m] += 1 if d[m] >= k and on: print(m)", "if m % n == 0: d[m] += 1 if d[m] >= k", "n, k = int(get_tok()), int(get_tok()) d = dd(int) on = True for m", "stdin from itertools import chain def sp(ln): \"to split the strings with a", "sys import stdin from itertools import chain def sp(ln): \"to split the strings", "in get_toks(): m = int(m) if m % n == 0: d[m] +=", "make_get_toks() from collections import defaultdict as dd n, k = int(get_tok()), int(get_tok()) d", "import stdin from itertools import chain def sp(ln): \"to split the strings with", "a map\" return ln.split() def the_it(): \"so that both results are callable in", "if f is None: f = stdin it = chain.from_iterable(map(sp, f)) return the_it,", "f)) return the_it, it.__next__ get_toks, 
get_tok = make_get_toks() from collections import defaultdict as", "split the strings with a map\" return ln.split() def the_it(): \"so that both", "on = True for m in get_toks(): m = int(m) if m %", "get_toks(): m = int(m) if m % n == 0: d[m] += 1", "get_tok = make_get_toks() from collections import defaultdict as dd n, k = int(get_tok()),", "the_it(): \"so that both results are callable in similar manner\" return it if", "the_it, it.__next__ get_toks, get_tok = make_get_toks() from collections import defaultdict as dd n,", "return it if f is None: f = stdin it = chain.from_iterable(map(sp, f))", "in similar manner\" return it if f is None: f = stdin it", "out of iterable of split strings\" from sys import stdin from itertools import", "= True for m in get_toks(): m = int(m) if m % n", "is None: f = stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks,", "f is None: f = stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__", "if d[m] >= k and on: print(m) on = False if on: print(\"none\")", "int(get_tok()), int(get_tok()) d = dd(int) on = True for m in get_toks(): m", "import chain def sp(ln): \"to split the strings with a map\" return ln.split()", "\"to split the strings with a map\" return ln.split() def the_it(): \"so that", "m in get_toks(): m = int(m) if m % n == 0: d[m]", "callable in similar manner\" return it if f is None: f = stdin", "collections import defaultdict as dd n, k = int(get_tok()), int(get_tok()) d = dd(int)", "k = int(get_tok()), int(get_tok()) d = dd(int) on = True for m in", "for m in get_toks(): m = int(m) if m % n == 0:", "and next functions out of iterable of split strings\" from sys import stdin", "of iterable of split strings\" from sys import stdin from itertools import chain", "m % n == 0: d[m] += 1 if d[m] >= k and", "next functions out of iterable of split strings\" from sys import stdin from", "from sys import stdin from itertools import chain def sp(ln): \"to split the", "map\" return 
ln.split() def the_it(): \"so that both results are callable in similar", "from collections import defaultdict as dd n, k = int(get_tok()), int(get_tok()) d =", "= int(get_tok()), int(get_tok()) d = dd(int) on = True for m in get_toks():", "defaultdict as dd n, k = int(get_tok()), int(get_tok()) d = dd(int) on =", "return ln.split() def the_it(): \"so that both results are callable in similar manner\"", "iterable of split strings\" from sys import stdin from itertools import chain def", "of split strings\" from sys import stdin from itertools import chain def sp(ln):", "+= 1 if d[m] >= k and on: print(m) on = False if", "itertools import chain def sp(ln): \"to split the strings with a map\" return", "as dd n, k = int(get_tok()), int(get_tok()) d = dd(int) on = True", "= make_get_toks() from collections import defaultdict as dd n, k = int(get_tok()), int(get_tok())", "int(get_tok()) d = dd(int) on = True for m in get_toks(): m =", "return the_it, it.__next__ get_toks, get_tok = make_get_toks() from collections import defaultdict as dd", "None: f = stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok", "int(m) if m % n == 0: d[m] += 1 if d[m] >=", "m = int(m) if m % n == 0: d[m] += 1 if", "strings with a map\" return ln.split() def the_it(): \"so that both results are", "ln.split() def the_it(): \"so that both results are callable in similar manner\" return", "iterator and next functions out of iterable of split strings\" from sys import", "1 if d[m] >= k and on: print(m) on = False if on:", "it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok = make_get_toks() from collections", "0: d[m] += 1 if d[m] >= k and on: print(m) on =", "results are callable in similar manner\" return it if f is None: f", "= dd(int) on = True for m in get_toks(): m = int(m) if", "dd n, k = int(get_tok()), int(get_tok()) d = dd(int) on = True for", "get_toks, get_tok = make_get_toks() from collections import defaultdict as dd 
n, k =", "== 0: d[m] += 1 if d[m] >= k and on: print(m) on", "split strings\" from sys import stdin from itertools import chain def sp(ln): \"to", "stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok = make_get_toks() from", "= stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok = make_get_toks()", "it if f is None: f = stdin it = chain.from_iterable(map(sp, f)) return", "d[m] += 1 if d[m] >= k and on: print(m) on = False", "def sp(ln): \"to split the strings with a map\" return ln.split() def the_it():", "that both results are callable in similar manner\" return it if f is", "the strings with a map\" return ln.split() def the_it(): \"so that both results", "similar manner\" return it if f is None: f = stdin it =", "% n == 0: d[m] += 1 if d[m] >= k and on:", "are callable in similar manner\" return it if f is None: f =", "import defaultdict as dd n, k = int(get_tok()), int(get_tok()) d = dd(int) on", "chain def sp(ln): \"to split the strings with a map\" return ln.split() def", "= chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok = make_get_toks() from collections import", "f = stdin it = chain.from_iterable(map(sp, f)) return the_it, it.__next__ get_toks, get_tok =", "from itertools import chain def sp(ln): \"to split the strings with a map\"", "dd(int) on = True for m in get_toks(): m = int(m) if m", "sp(ln): \"to split the strings with a map\" return ln.split() def the_it(): \"so" ]
[ "requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import Config", "acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import Config def add_collex(survey_id,", "'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event", "'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid", "import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import", "survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date", "'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference')", "from acceptance_tests.utilities.test_case_helper import test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name =", "config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, 
%H:%M:%S\")", "'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event =", "import test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> '", "f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z',", "f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body)", "collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey", "exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date =", "{'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'},", "collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules':", "start_date = datetime.utcnow() end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body =", "= response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() 
test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id,", "collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date =", "= start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id,", "'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id =", "'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date =", "collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'],", "<reponame>ONSdigital/ssdc-rm-acceptance-tests from datetime import datetime, timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from", "%H:%M:%S\") start_date = datetime.utcnow() end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body", "= datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid", "datetime.utcnow() end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name,", 
"timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate':", "test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end", "datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date')", "'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status()", "from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y,", "+ timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\",", "'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules", "collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date", "datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or", "requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() 
test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection", "end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId':", "missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test': 'passed'},", "import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules):", "'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'],", "acceptance_tests.utilities.test_case_helper import test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME>", "= f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate':", "= '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date +", "\"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing", "= {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test':", "datetime import datetime, timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import", "survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 
'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'],", "datetime, timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from", "response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name')", "get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'],", "\"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response =", "collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update()", "response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected", "start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference':", "response = requests.post(url, json=body) response.raise_for_status() collex_id = 
response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name,", "reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or", "name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'],", "datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises'", "test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\")", "from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import Config def", "json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise", "url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z',", "def add_collex(survey_id, collection_instrument_selection_rules): 
collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow()", "'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response", "ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\")", "= datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start", "start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date')", "or missing start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test':", "start date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test': 'passed'}, 'Unexpected", "= requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected", "test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') 
test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\",", "+ datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date + timedelta(days=2) url =", "collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date", "import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date", "add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date", "'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url,", "} response = requests.post(url, json=body) response.raise_for_status() collex_id = response.json() collection_exercise_update_event = get_emitted_collection_exercise_update() test_helper.assertEqual(collection_exercise_update_event['name'],", "get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name", "end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test': 'passed'}, 'Unexpected metadata') return collex_id", "test_helper from config import Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' +", "= datetime.utcnow() end_date = start_date + timedelta(days=2) url = f'{Config.SUPPORT_TOOL_API}/collectionExercises' body = {'name':", "test_helper.assertEqual(parsed_end_date, 
end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test': 'passed'}, 'Unexpected metadata') return", "import datetime, timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper", "body = {'name': collex_name, 'surveyId': survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata':", "\"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date,", "' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date + timedelta(days=2) url", "test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date,", "'<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date = datetime.utcnow() end_date = start_date + timedelta(days=2)", "parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing start date') test_helper.assertEqual(parsed_end_date, end_date,", "survey_id, 'reference': \"MVP012021\", 'startDate': f'{start_date.isoformat()}Z', 'endDate': f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules }", "= get_emitted_collection_exercise_update() 
test_helper.assertEqual(collection_exercise_update_event['name'], collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID')", "f'{end_date.isoformat()}Z', 'metadata': {'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id", "parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date, 'Invalid or missing", "collex_name, 'Unexpected collection exercise name') test_helper.assertEqual(collection_exercise_update_event['surveyId'], survey_id, 'Unexpected survey ID') test_helper.assertEqual(collection_exercise_update_event['reference'], \"MVP012021\", 'Unexpected", "date') test_helper.assertEqual(parsed_end_date, end_date, 'Invalid or missing end date') test_helper.assertEqual(collection_exercise_update_event['metadata'], {'test': 'passed'}, 'Unexpected metadata')", "from datetime import datetime, timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper", "Config def add_collex(survey_id, collection_instrument_selection_rules): collex_name = '<NAME> ' + datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\") start_date =", "timedelta import requests from acceptance_tests.utilities.event_helper import get_emitted_collection_exercise_update from acceptance_tests.utilities.test_case_helper import test_helper from config", "{'test': 'passed'}, 'collectionInstrumentSelectionRules': collection_instrument_selection_rules } response = requests.post(url, json=body) response.raise_for_status() collex_id = response.json()", "\"MVP012021\", 'Unexpected 
reference') parsed_start_date = datetime.strptime(collection_exercise_update_event['startDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") parsed_end_date = datetime.strptime(collection_exercise_update_event['endDate'], \"%Y-%m-%dT%H:%M:%S.%fZ\") test_helper.assertEqual(parsed_start_date, start_date," ]
[ "api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name]", "@api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler:", "@api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser)", "command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\")", "@api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider", "def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler =", "handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404,", "= light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\")", "class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, 
skip_none=True) @api.expect(survey_id_parser) @inject def get(", "= command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's", "detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not", "identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\")", "Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\"", "], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\"", ".models import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject, Provide from", "identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\")", "identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\")", "@api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) 
@check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model,", "@api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\",", "\"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject", "core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api =", ") api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model", "ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\" survey_id =", "AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and", "non_detection_model, ) from dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory from api.container", "AppContainer from shared.interface.command import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload", "= Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and non detections \"\"\" survey_id", "found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") 
@api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def", "Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model", "related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\",", "ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self,", "@api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"])", "@set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command]", "Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command", "def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler =", "import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import", "api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's 
identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\")", "Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and non detections \"\"\" survey_id =", "= Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets", "get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[", "from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api", "handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404,", "@api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[", "= Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"]", "@api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser)", "], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and non", "): \"\"\" Gets detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command =", "command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") 
@api.response(200, \"Success\") @api.response(404, \"Not found\")", "], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\"", "result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource):", "Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler,", "Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler,", "@api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider", "command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ):", "LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve", "from flask_restx import Namespace, Resource from .parsers import survey_id_parser from .models import (", "light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory", "api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\")", "@set_permissions_decorator([\"admin\", \"basic_user\"]) 
@set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id,", "\"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory:", "dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory from api.container import AppContainer from", "@set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command]", "from .models import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject, Provide", "Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\"", "import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator,", "skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ],", "survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\",", "object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class 
ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator", "check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] =", "AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command =", "set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name]", "command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The", "light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200,", "Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non", "command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ):", "( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] =", "\"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return", "skip_none=True) 
@api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ],", "import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject, Provide from dependency_injector.providers", "ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and non detections \"\"\"", "\"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"])", "def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler =", "and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler,", "): \"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id,", "@api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"])", "AppContainer.view_result_handler ], ): \"\"\" Gets detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"]", ") from dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory from api.container import", "@api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler:", "\"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) 
@set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject", "import survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import", "NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self,", "AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections", "object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator", "from shared.interface.command import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from", "@set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command]", "], ): \"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory(", "\"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True)", "result_handler: 
ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\" survey_id", "from dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory from api.container import AppContainer", "import Factory from api.container import AppContainer from shared.interface.command import Command from shared.interface.command import", "operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The", "result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\" survey_id", "survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\")", "\"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"])", "( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject, Provide from dependency_injector.providers import", "id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ],", "@inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler", "self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler", "command_factory( payload=LightcurveServicePayload(id, 
survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\")", "@api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\",", "@api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model,", "\"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True)", "], ): \"\"\" Gets detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command", "from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator,", "@set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id,", "Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections", "non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource):", "Factory[Command] = Provide[ 
AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\"", "@check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] =", "\"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory:", "class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get(", "detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id),", "Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command", "return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class", "Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import (", "= Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"]", "\"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id),", "detection_model, non_detection_model, ) from 
dependency_injector.wiring import inject, Provide from dependency_injector.providers import Factory from", "@api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\",", "@set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator @api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id,", ") command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not", "skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ],", "non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, )", "@api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler:", "Provide from dependency_injector.providers import Factory from api.container import AppContainer from shared.interface.command import Command", "= Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just", "Gets detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id,", "= command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result 
@api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's", "\"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id),", "survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result", "\"\"\" Gets detections and non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory(", "api.container import AppContainer from shared.interface.command import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service", "@api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser)", "\"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory:", "@api.doc(\"non_detections\") @api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[", "get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler = Provide[", "import AppContainer from shared.interface.command import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import", "ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = 
Namespace(\"lightcurve\", description=\"LightCurve related operations\")", "description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\")", "survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\",", "Namespace, Resource from .parsers import survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model,", "return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class", "self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler", "= survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\")", "from dependency_injector.providers import Factory from api.container import AppContainer from shared.interface.command import Command from", "payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200,", "ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, )", "self, id, command_factory: Factory[Command] = Provide[ 
AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler", "class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get(", "= non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class", "Resource from .parsers import survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model, )", "from .parsers import survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model, ) from", "@check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] =", "survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\")", "@inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler", "import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\",", "@api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[", "the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), 
handler=result_handler, )", "= Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just", "shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator,", "command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\")", "= survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/non_detections\")", "\"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject", "\"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True)", "AppContainer.view_result_handler ], ): \"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command =", "= Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] =", "ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the detections \"\"\" survey_id =", "= detection_model 
api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404,", "result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Gets detections and non detections", ") command.execute() return result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not", "id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ],", "@api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"])", "survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring import inject,", "@api.response(200, \"Success\") @api.response(404, \"Not found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model,", "object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_non_detections\"]) @check_permissions_decorator", "], ): \"\"\" Just non detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory(", "import inject, Provide from dependency_injector.providers import Factory from api.container import AppContainer from shared.interface.command", "import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) 
api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name]", "inject, Provide from dependency_injector.providers import Factory from api.container import AppContainer from shared.interface.command import", "command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The", "from api.container import AppContainer from shared.interface.command import Command from shared.interface.command import ResultHandler from", "\"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"])", "id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ],", "set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve related operations\") api.models[light_curve_model.name] = light_curve_model", "@check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] =", "Factory from api.container import AppContainer from shared.interface.command import Command from shared.interface.command import ResultHandler", "result_handler.result @api.route(\"/<id>/non_detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200, \"Success\") @api.response(404, \"Not found\") class NonDetections(Resource):", "flask_restx import Namespace, Resource from .parsers import survey_id_parser from .models import ( light_curve_model,", "@inject def get( self, id, command_factory: 
Factory[Command] = Provide[ AppContainer.lightcurve_package.get_lightcurve_command.provider ], result_handler: ResultHandler", "Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just the", "): \"\"\" Just the detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id,", "found\") class ObjectDetections(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_detections\"]) @check_permissions_decorator @api.doc(\"detections\") @api.marshal_list_with(detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def", "from ralidator_flask.decorators import ( set_permissions_decorator, set_filters_decorator, check_permissions_decorator, ) api = Namespace(\"lightcurve\", description=\"LightCurve related", "found\") class LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def", "detections \"\"\" survey_id = survey_id_parser.parse_args()[\"survey_id\"] command = command_factory( payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute()", "@api.marshal_list_with(non_detection_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_non_detections_command.provider", "AppContainer.lightcurve_package.get_non_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ): \"\"\" Just non detections", "import Namespace, Resource from .parsers import survey_id_parser from .models import ( light_curve_model, detection_model,", 
"LightCurve(Resource): @set_permissions_decorator([\"admin\", \"basic_user\"]) @set_filters_decorator([\"filter_atlas_lightcurve\"]) @check_permissions_decorator @api.doc(\"lightcurve\") @api.marshal_with(light_curve_model, skip_none=True) @api.expect(survey_id_parser) @inject def get( self,", "payload=LightcurveServicePayload(id, survey_id), handler=result_handler, ) command.execute() return result_handler.result @api.route(\"/<id>/detections\") @api.param(\"id\", \"The object's identifier\") @api.response(200,", "dependency_injector.providers import Factory from api.container import AppContainer from shared.interface.command import Command from shared.interface.command", "command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[ AppContainer.view_result_handler ], ):", "api.models[light_curve_model.name] = light_curve_model api.models[detection_model.name] = detection_model api.models[non_detection_model.name] = non_detection_model @api.route(\"/<id>/lightcurve\") @api.param(\"id\", \"The object's", ".parsers import survey_id_parser from .models import ( light_curve_model, detection_model, non_detection_model, ) from dependency_injector.wiring", "shared.interface.command import Command from shared.interface.command import ResultHandler from core.light_curve.domain.lightcurve_service import LightcurveServicePayload from ralidator_flask.decorators", "get( self, id, command_factory: Factory[Command] = Provide[ AppContainer.lightcurve_package.get_detections_command.provider ], result_handler: ResultHandler = Provide[" ]
[ "basictypes package is to provide types which provide enough metadata to allow an", "\"\"\"Common data-modeling Python types The idea of the basictypes package is to provide", "to provide types which provide enough metadata to allow an application to use", "provide types which provide enough metadata to allow an application to use introspection", "the basictypes package is to provide types which provide enough metadata to allow", "which provide enough metadata to allow an application to use introspection to perform", "types which provide enough metadata to allow an application to use introspection to", "The idea of the basictypes package is to provide types which provide enough", "application to use introspection to perform much of the housekeeping required to create", "allow an application to use introspection to perform much of the housekeeping required", "is to provide types which provide enough metadata to allow an application to", "to use introspection to perform much of the housekeeping required to create business", "an application to use introspection to perform much of the housekeeping required to", "of the basictypes package is to provide types which provide enough metadata to", "types The idea of the basictypes package is to provide types which provide", "idea of the basictypes package is to provide types which provide enough metadata", "introspection to perform much of the housekeeping required to create business applications. 
\"\"\"", "to allow an application to use introspection to perform much of the housekeeping", "use introspection to perform much of the housekeeping required to create business applications.", "metadata to allow an application to use introspection to perform much of the", "provide enough metadata to allow an application to use introspection to perform much", "data-modeling Python types The idea of the basictypes package is to provide types", "enough metadata to allow an application to use introspection to perform much of", "Python types The idea of the basictypes package is to provide types which", "package is to provide types which provide enough metadata to allow an application" ]
[ "import random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return", "= discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 -", "G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X,", "D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\"", "Create random batches def rand_batch(size): global Train global Add s_size = Train.shape[0] mybatch", "shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim]))", "+ D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z", "rn not in mybatch: mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i])", "Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None,", "## Create random batches def rand_batch(size): global Train global Add s_size = Train.shape[0]", "model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim]))", "tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4", "+ y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim]))", "tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver", "D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3)", "epoch_idx in range(num_epochs): if epoch_idx 
% 10000 == 0: n_sample = 1 Z_sample", "in range(num_epochs): if epoch_idx % 10000 == 0: n_sample = 1 Z_sample =", "G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D", "## Noise for the GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n])", "a specific directory if not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random", "G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample =", "= tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net", "Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] =", "i = 0 ## Create random batches def rand_batch(size): global Train global Add", "tensorflow.examples.tutorials.mnist import input_data import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec", "D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return", "(tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss =", "D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z =", "= tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2,", "G_b2, G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 =", "tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim])", "= Train.shape[1] ## Number of epochs num_epochs = 100000 y_dim = Add.shape[1] ##", "npzfile[\"add\"] ## Batch Size mb_size = 20 ## Noise Dimension Z_dim = 10000", "= 10000 X_dim = Train.shape[1] ## Number of epochs num_epochs = 100000 y_dim", "+ y_dim, h_dim])) G_b1 = 
tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim]))", "0 X_mb = [] y_mb = [] while count < size: rn =", "'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the GAN def sample_Z(m, n):", "D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit", "## Learning Rate lr = 0.1 ## For putting outputs in a specific", "G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1,", "Add s_size = Train.shape[0] mybatch = [] count = 0 X_mb = []", "random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count +=1 for i in mybatch:", "= sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z:", "for the GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ## Load", "= [] y_mb = [] while count < size: rn = random.randint(0,s_size-1) if", "h2_dim = 500 h3_dim = 250 random.seed() ## Learning Rate lr = 0.1", "D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\"", "h_dim = 1000 h2_dim = 500 h3_dim = 250 random.seed() ## Learning Rate", "y) D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real =", "plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the GAN def sample_Z(m, n): return", "y_mb = [] while count < size: rn = random.randint(0,s_size-1) if rn not", "return np.random.uniform(-100., 100., size=[m, n]) ## Load the Data npzfile = np.load(\"xSet.npz\") Train=", "input_data import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec", "X_dim = Train.shape[1] ## Number of epochs num_epochs = 100000 y_dim = Add.shape[1]", "plt import matplotlib.gridspec as gridspec import os import random def plot(samples): fig =", "tf.placeholder(tf.float32, shape=[None, 
y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2", "D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1", "Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver,", "sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx))", "Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample =", "y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2", "Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size", "Train.shape[0] mybatch = [] count = 0 X_mb = [] y_mb = []", "y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) ))", "= [] while count < size: rn = random.randint(0,s_size-1) if rn not in", "[D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs =", "20 ## Noise Dimension Z_dim = 10000 X_dim = Train.shape[1] ## Number of", "def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ## Load the Data npzfile", "tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3,", "## Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ##", "numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import os", "y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2)", "= tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2", "D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4,", 
"model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1", "Learning Rate lr = 0.1 ## For putting outputs in a specific directory", "G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real,", "G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3", "= tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim]))", "tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model", "= 100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim = 1000 h2_dim =", "tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2,", "mybatch = [] count = 0 X_mb = [] y_mb = [] while", "sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D =", "Rate lr = 0.1 ## For putting outputs in a specific directory if", "tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G", "= 0.1 ## For putting outputs in a specific directory if not os.path.exists('out/'):", "random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig", "D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss,", "h3_dim = 250 random.seed() ## Learning Rate lr = 0.1 ## For putting", "G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) 
theta_G = [G_W1,", "values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) +", "= tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1,", "tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2)", "Batch Size mb_size = 20 ## Noise Dimension Z_dim = 10000 X_dim =", "= [] count = 0 X_mb = [] y_mb = [] while count", "\"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1", "plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the GAN def", "= np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size = 20", "sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx % 10000 == 0: n_sample =", "D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator", "= random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count +=1 for i in", "import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import os import random def", "epoch_idx % 10000 == 0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample", "= 500 h3_dim = 250 random.seed() ## Learning Rate lr = 0.1 ##", "tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx", "[] y_mb = [] while count < size: rn = random.randint(0,s_size-1) if rn", "in_dim = size[0] xavier_stddev = 1. / tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size, stddev=xavier_stddev)", "mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size):", "- D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake", "= tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 =", "= sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0", "import os import random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0)", "samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B", "100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G loss: {}'.format(D)) print() print(D_W1.eval())", "G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return", "= Train.shape[0] mybatch = [] count = 0 X_mb = [] y_mb =", "discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10))", "directory if not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random batches def", "10000 X_dim = Train.shape[1] ## Number of epochs num_epochs = 100000 y_dim =", "G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim,", "if epoch_idx % 10000 == 0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim)", "0.1 ## For putting outputs in a specific directory if not os.path.exists('out/'): os.makedirs('out/')", "(X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev = 1. 
/ tf.sqrt(in_dim / 2.)", "rand_batch(size): global Train global Add s_size = Train.shape[0] mybatch = [] count =", "Add.shape[1] ## Hidden dimensions h_dim = 1000 h2_dim = 500 h3_dim = 250", "y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3", "shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 =", "in a specific directory if not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create", "tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x,", "= tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1]))", "= generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y)", "tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3,", "tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4", "1. / tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\"", "D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim]))", "sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples", "D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss =", "\"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 =", "tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2,", "G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z, y])", "Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim])", "/ 2.) return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32,", "2.) return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None,", "= [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs", "xavier_init(size): in_dim = size[0] xavier_stddev = 1. / tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size,", "+ D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit", "0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig", "G_log_prob G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake =", "tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real =", "values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) +", "100., size=[m, n]) ## Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add", "tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3", "D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob", "G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z,", "= plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver,", "X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X:", "D_b2, D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 =", "size: rn = random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count +=1 for", "Hidden dimensions h_dim = 1000 h2_dim = 500 h3_dim = 250 random.seed() ##", "batches def rand_batch(size): global Train global Add s_size = Train.shape[0] mybatch = []", "C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 == 0:", "= sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D", "D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = 
tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables())", "as tf from tensorflow.examples.tutorials.mnist import input_data import numpy as np import matplotlib.pyplot as", "< size: rn = random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count +=1", "= sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb, y_mb =", "y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev = 1. / tf.sqrt(in_dim", "G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob", "G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1", "generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1)", "tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2,", "+ G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob =", "50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb,", "gridspec import os import random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black'])", "250 random.seed() ## Learning Rate lr = 0.1 ## For putting outputs in", "= npzfile[\"add\"] ## Batch Size mb_size = 20 ## Noise Dimension Z_dim =", "= tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1,", "tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample", "G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob", "y_dim = Add.shape[1] ## Hidden dimensions h_dim = 1000 h2_dim = 500 h3_dim", 
"return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim])", "in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev =", "tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2", "GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ## Load the Data", "return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1", "= tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim]))", "D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver", "with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx % 10000", "G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 =", "+=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim =", "+ D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) +", "n]) ## Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"]", "outputs in a specific directory if not os.path.exists('out/'): os.makedirs('out/') i = 0 ##", "tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4", "return fig ## Noise for the GAN def sample_Z(m, n): return np.random.uniform(-100., 100.,", "os.makedirs('out/') i = 0 ## Create random 
batches def rand_batch(size): global Train global", "= size[0] xavier_stddev = 1. / tf.sqrt(in_dim / 2.) return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\"", "D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(-", "npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size = 20 ## Noise Dimension", "h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 =", "num_epochs = 100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim = 1000 h2_dim", "D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3)", "= 250 random.seed() ## Learning Rate lr = 0.1 ## For putting outputs", "plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the GAN", "import tensorflow as tf from tensorflow.examples.tutorials.mnist import input_data import numpy as np import", "= tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit =", "y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 =", "matplotlib.gridspec as gridspec import os import random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue',", "np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size = 20 ##", "h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 =", "def generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) +", "count < size: rn = random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count", "rn = random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn) count +=1 for i", "X_dim]) y 
= tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1", "var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx %", "n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] =", "tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4", "h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 =", "= tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D)", "G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 =", "D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 =", "Z_sample, y:y_mb}) if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B))", "D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs,", "D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess:", "/ tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X", "Noise for the GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ##", "range(num_epochs): if epoch_idx % 10000 == 0: n_sample = 1 Z_sample = sample_Z(n_sample,", "sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample,", "= tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X, y)", "= tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def", "= tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob =", "in mybatch: mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb)", "= tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim]))", "D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x,", "s_size = Train.shape[0] mybatch = [] count = 0 X_mb = [] y_mb", "y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2)", "tf from tensorflow.examples.tutorials.mnist import input_data import numpy as np import matplotlib.pyplot as plt", "X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx", "y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample})", "if not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random batches def rand_batch(size):", "np.random.uniform(-100., 100., size=[m, n]) ## Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"]", "G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3)", "global Train 
global Add s_size = Train.shape[0] mybatch = [] count = 0", "G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs):", "G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim]))", "Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size = 20 ## Noise", "lr = 0.1 ## For putting outputs in a specific directory if not", "For putting outputs in a specific directory if not os.path.exists('out/'): os.makedirs('out/') i =", "tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim,", "mb_size = 20 ## Noise Dimension Z_dim = 10000 X_dim = Train.shape[1] ##", "= tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def", "n): return np.random.uniform(-100., 100., size=[m, n]) ## Load the Data npzfile = np.load(\"xSet.npz\")", "tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob = tf.nn.sigmoid(D_logit)", "discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1)", "Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100", "Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim +", "samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb, y_mb", "os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random batches def rand_batch(size): global Train", "tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3,", "random.seed() ## Learning Rate lr = 0.1 ## For 
putting outputs in a", "tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y", "matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import os import random def plot(samples):", "= tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim +", "h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D =", "def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ##", "xavier_stddev = 1. / tf.sqrt(in_dim / 2.) return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net", "tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4", "D_logit_real = discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1", "D_prob,D_logit \"\"\" Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 =", "h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G =", "D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim,", "[] count = 0 X_mb = [] y_mb = [] while count <", "print samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim)", "A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss],", "D_prob = tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z 
= tf.placeholder(tf.float32,", "from tensorflow.examples.tutorials.mnist import input_data import numpy as np import matplotlib.pyplot as plt import", "= tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1,", "feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss:", "return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake", "random batches def rand_batch(size): global Train global Add s_size = Train.shape[0] mybatch =", "= 1. / tf.sqrt(in_dim / 2.) return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model", "D_loss = D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss,", "putting outputs in a specific directory if not os.path.exists('out/'): os.makedirs('out/') i = 0", "== 0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim])", "= tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2", "import matplotlib.gridspec as gridspec import os import random def plot(samples): fig = plt.figure()", "discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10)", "+ G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) +", "= 1000 h2_dim = 500 h3_dim = 250 random.seed() ## Learning Rate lr", "+ G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z,", "Number of epochs num_epochs = 100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim", "generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake, 
D_logit_fake = discriminator(G_sample, y) D_loss_real", "D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1)", "1000 h2_dim = 500 h3_dim = 250 random.seed() ## Learning Rate lr =", "= 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples", "Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim,", "as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import os import", "== 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G loss: {}'.format(D)) print() print(D_W1.eval()) print(G_W1.eval())", "return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev = 1. / tf.sqrt(in_dim /", "= sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 == 0: print('Iter:", "= tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real", "as plt import matplotlib.gridspec as gridspec import os import random def plot(samples): fig", "fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for", "def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) +", "tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx % 10000 ==", "h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 =", "X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev = 1. 
/", "theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y):", "the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size", "D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3", "def xavier_init(size): in_dim = size[0] xavier_stddev = 1. / tf.sqrt(in_dim / 2.) return", "= tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim]))", "tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss", "= D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G)", "i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev", "Z_dim = 10000 X_dim = Train.shape[1] ## Number of epochs num_epochs = 100000", "## Number of epochs num_epochs = 100000 y_dim = Add.shape[1] ## Hidden dimensions", "y): inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2", "tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X =", "npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch Size mb_size =", "% 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G loss: {}'.format(D)) print()", "= 0 X_mb = [] y_mb = [] while count < size: rn", "= plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the", "y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample,", "dimensions h_dim = 1000 h2_dim = 500 h3_dim = 250 random.seed() ## Learning", "np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample,", "inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 =", "tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z,", "for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0]", "= D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as", "= tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2,", "[G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs =", "+ D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) +", "feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample", "G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = 
tf.nn.sigmoid(G_log_prob)", "import input_data import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as", "= tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim, X_dim]))", "tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2)", "= tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 = tf.nn.tanh(tf.matmul(D_h1, D_W2) + D_b2) D_h3 =", "specific directory if not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random batches", "tf.nn.tanh(tf.matmul(inputs, G_W1) + G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2,", "G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake, D_logit_fake = discriminator(G_sample,", "0 ## Create random batches def rand_batch(size): global Train global Add s_size =", "D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with", "= 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0", "D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake =", "D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1, values=[x, y])", "tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3", "= [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3, G_b4] def generator(z, y): inputs", "G_b1) G_h2 = tf.nn.tanh(tf.matmul(G_h1, G_W2) + G_b2) G_h3 = tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3)", "= tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = 
tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1]))", "mybatch: mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def", "stddev=xavier_stddev) \"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y =", "= 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples)", "D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y): inputs = tf.concat(axis=1,", "500 h3_dim = 250 random.seed() ## Learning Rate lr = 0.1 ## For", "1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3,", "Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb})", "X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32, shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim", "count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim", "Noise Dimension Z_dim = 10000 X_dim = Train.shape[1] ## Number of epochs num_epochs", "count = 0 X_mb = [] y_mb = [] while count < size:", "100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim = 1000 h2_dim = 500", "Dimension Z_dim = 10000 X_dim = Train.shape[1] ## Number of epochs num_epochs =", "the GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ## Load the", "inputs = tf.concat(axis=1, values=[x, y]) D_h1 = tf.nn.tanh(tf.matmul(inputs, D_W1) + D_b1) D_h2 =", "Train global Add s_size = Train.shape[0] mybatch = [] count = 0 X_mb", "Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples =", "tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y) D_real, D_logit_real = discriminator(X, y) D_fake,", "= 0 ## Create random batches def rand_batch(size): global Train global Add 
s_size", "feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if", "= tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if", "tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D", "G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim])) G_W4 = tf.Variable(xavier_init([h3_dim,", "fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B =", "tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 =", "= tf.nn.tanh(tf.matmul(G_h2, G_W3) + G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob", "Add = npzfile[\"add\"] ## Batch Size mb_size = 20 ## Noise Dimension Z_dim", "G_b3, G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs,", "D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4]", "## Batch Size mb_size = 20 ## Noise Dimension Z_dim = 10000 X_dim", "D_W2) + D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4)", "<gh_stars>1-10 import tensorflow as tf from tensorflow.examples.tutorials.mnist import input_data import numpy as np", "D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) D_b3 =", "y:y_mb}) if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G", "os import random def plot(samples): fig = plt.figure() 
plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show()", "= 20 ## Noise Dimension Z_dim = 10000 X_dim = Train.shape[1] ## Number", "= tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim]))", "of epochs num_epochs = 100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim =", "import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import", "D_b3 = tf.Variable(tf.ones(shape=[h3_dim])) D_W4 = tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D = [D_W1,", "Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim])) G_W2 = tf.Variable(xavier_init([h_dim,", "global Add s_size = Train.shape[0] mybatch = [] count = 0 X_mb =", ")) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver =", "\"\"\" Discriminator Net model \"\"\" X = tf.placeholder(tf.float32, shape=[None, X_dim]) y = tf.placeholder(tf.float32,", "= np.ones(shape=[n_sample, y_dim]) y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z:", "size[0] xavier_stddev = 1. / tf.sqrt(in_dim / 2.) 
return tf.random_normal(shape=size, stddev=xavier_stddev) \"\"\" Discriminator", "G_b3) G_log_prob = tf.matmul(G_h3,G_W4)+G_b4 G_prob = tf.nn.sigmoid(G_log_prob) return G_log_prob G_sample = generator(Z, y)", "= tf.nn.sigmoid(D_logit) return D_prob,D_logit \"\"\" Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None,", "y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3", "sample_Z(m, n): return np.random.uniform(-100., 100., size=[m, n]) ## Load the Data npzfile =", "y:y_sample}) print samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size,", "if rn not in mybatch: mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i])", "0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) y_sample[0][0]", "for epoch_idx in range(num_epochs): if epoch_idx % 10000 == 0: n_sample = 1", "np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import os import random", "epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G loss: {}'.format(D))", "not os.path.exists('out/'): os.makedirs('out/') i = 0 ## Create random batches def rand_batch(size): global", "D_fake, D_logit_fake = discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake", "D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 = tf.Variable(tf.ones(shape=[h2_dim])) D_W3 = tf.Variable(xavier_init([h2_dim,", "= tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss = D_loss_real", "if epoch_idx % 100 == 0: print('Iter: {}'.format(epoch_idx)) print('D loss: {}'.format(B)) print('G loss:", "1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample, y_dim]) 
y_sample[0][0] = 0.0 y_sample[0][1]", "y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx % 100 ==", "## Hidden dimensions h_dim = 1000 h2_dim = 500 h3_dim = 250 random.seed()", "plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise", "Train.shape[1] ## Number of epochs num_epochs = 100000 y_dim = Add.shape[1] ## Hidden", "= tf.Variable(xavier_init([h_dim, h2_dim])) G_b2 = tf.Variable(tf.zeros(shape=[h2_dim])) G_W3 = tf.Variable(xavier_init([h2_dim, h3_dim])) G_b3 = tf.Variable(tf.zeros(shape=[h3_dim]))", "def rand_batch(size): global Train global Add s_size = Train.shape[0] mybatch = [] count", "'red','green', 'black']) plt.plot(samples[0],linewidth=2.0) plt.show() return fig ## Noise for the GAN def sample_Z(m,", "Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add = npzfile[\"add\"] ## Batch", "= tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 = tf.Variable(tf.zeros(shape=[h_dim]))", "tensorflow as tf from tensorflow.examples.tutorials.mnist import input_data import numpy as np import matplotlib.pyplot", "G_W4 = tf.Variable(xavier_init([h3_dim, X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4,", "while count < size: rn = random.randint(0,s_size-1) if rn not in mybatch: mybatch.append(rn)", "y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(- tf.log(D_fake+1e-10)) D_loss", "## For putting outputs in a specific directory if not os.path.exists('out/'): os.makedirs('out/') i", "y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb,", "Size mb_size = 20 ## Noise Dimension Z_dim = 10000 X_dim = Train.shape[1]", "G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, 
G_W3, G_W4, G_b1, G_b2, G_b3, G_b4]", "size=[m, n]) ## Load the Data npzfile = np.load(\"xSet.npz\") Train= npzfile[\"train\"] Add =", "tf.log(D_fake+1e-10)) D_loss = D_loss_real G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver =", "10000 == 0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample = np.ones(shape=[n_sample,", "= discriminator(G_sample, y) D_loss_real = tf.reduce_mean(- (tf.log((1 - D_fake)+1e-10)+tf.log(D_real+1e-10) )) D_loss_fake = tf.reduce_mean(-", "epochs num_epochs = 100000 y_dim = Add.shape[1] ## Hidden dimensions h_dim = 1000", "X_dim])) G_b4 = tf.Variable(tf.zeros(shape=[X_dim])) theta_G = [G_W1, G_W2, G_W3, G_W4, G_b1, G_b2, G_b3,", "tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx", "shape=[None, y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 =", "sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx % 10000 == 0: n_sample", "[] while count < size: rn = random.randint(0,s_size-1) if rn not in mybatch:", "X_mb = [] y_mb = [] while count < size: rn = random.randint(0,s_size-1)", "\"\"\" Generator Net model \"\"\" Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim", "D_loss], feed_dict={X: X_mb, Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb})", "as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in range(num_epochs): if epoch_idx % 10000 == 0:", "Z: Z_sample, y:y_mb}) C,D = sess.run([G_solver, G_loss], feed_dict={Z: Z_sample, y:y_mb}) if epoch_idx %", "fig ## Noise for the GAN def sample_Z(m, n): return np.random.uniform(-100., 100., size=[m,", "= Add.shape[1] ## Hidden 
dimensions h_dim = 1000 h2_dim = 500 h3_dim =", "= tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4 D_prob =", "as gridspec import os import random def plot(samples): fig = plt.figure() plt.gca().set_color_cycle(['blue', 'red','green',", "sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig = plot(samples) X_mb, y_mb = rand_batch(mb_size)", "mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return (X_mb,y_mb) def xavier_init(size): in_dim = size[0] xavier_stddev = 1.", "y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print samples fig =", "Z = tf.placeholder(tf.float32, shape=[None, Z_dim]) G_W1 = tf.Variable(xavier_init([Z_dim + y_dim, h_dim])) G_b1 =", "D_b2) D_h3 = tf.nn.tanh(tf.matmul(D_h2, D_W3) + D_b3) D_logit = tf.matmul(D_h3, D_W4) + D_b4", "= tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for", "G_loss = D_loss_fake D_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(D_loss, var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session()", "= rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z:", "var_list=theta_D) G_solver = tf.train.AdagradOptimizer(learning_rate=lr).minimize(G_loss, var_list=theta_G) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for epoch_idx in", "G_b4] def generator(z, y): inputs = tf.concat(axis=1, values=[z, y]) G_h1 = tf.nn.tanh(tf.matmul(inputs, G_W1)", "theta_D = [D_W1, D_W2, D_W3, D_W4, D_b1, D_b2, D_b3, D_b4] def discriminator(x, y):", "% 10000 == 0: n_sample = 1 Z_sample = sample_Z(n_sample, Z_dim) y_sample =", "= tf.Variable(xavier_init([h3_dim, 1])) D_b4 = tf.Variable(tf.ones(shape=[1])) theta_D 
= [D_W1, D_W2, D_W3, D_W4, D_b1,", "tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim, h2_dim])) D_b2 =", "## Noise Dimension Z_dim = 10000 X_dim = Train.shape[1] ## Number of epochs", "y_dim]) D_W1 = tf.Variable(xavier_init([X_dim + y_dim, h_dim])) D_b1 = tf.Variable(tf.ones(shape=[h_dim])) D_W2 = tf.Variable(xavier_init([h_dim,", "rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss], feed_dict={X: X_mb, Z: Z_sample,", "not in mybatch: mybatch.append(rn) count +=1 for i in mybatch: X_mb.append(Train[i]) y_mb.append(Add[i]) return", "plt.show() return fig ## Noise for the GAN def sample_Z(m, n): return np.random.uniform(-100.,", "y_sample[0][0] = 0.0 y_sample[0][1] = 50.0 samples = sess.run(G_sample, feed_dict={Z: Z_sample, y:y_sample}) print", "plot(samples) X_mb, y_mb = rand_batch(mb_size) Z_sample = sample_Z(mb_size, Z_dim) A,B = sess.run([D_solver, D_loss]," ]
[ "def generate_catalog_if_needed(): # helper method to check if catalog is present, and generate", "Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language", "py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData,", "setup command to generate fresh catalog and schemas''' user_options = [] def initialize_options(self):", "and all downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML", "Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language", "in pip-dev-req.txt # for generating documentation on readthedocs.org # unittest2 should only be", "remove schema files and XML catalog\" def run(self): # remove schema data and", "to delete build and schema files\"\"\" description = \"Custom clean command; remove schema", "sdist from distutils.core import Command import os import sys import shutil from setuptools", "if it's there with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements", "Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English',", "] LONG_DESCRIPTION = None try: # read the description if it's there with", "should be duplicated in pip-dev-req.txt # for generating documentation on readthedocs.org # unittest2", "class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog is included.\"\"\" def run(self):", "and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML", "requirements should be duplicated in pip-dev-req.txt # for generating documentation on readthedocs.org #", "System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python ::", "Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python", "except: pass 
dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests',", "'tox', 'requests', ] # NOTE: dev requirements should be duplicated in pip-dev-req.txt #", "the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django form", "Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6',", "import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog", "import clean from distutils.command.sdist import sdist from distutils.core import Command import os import", "def run(self): # importing this forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed()", "then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class", "eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog is", "user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass", "command to ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS =", "Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming", "try: # read the description if it's there with open('README.rst') as desc_f: LONG_DESCRIPTION", "OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab modules.\"\"\"", "downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding,", "as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9',", "description if it's there with open('README.rst') as desc_f: 
LONG_DESCRIPTION = desc_f.read() except: pass", "and schema files\"\"\" description = \"Custom clean command; remove schema files and XML", "import build_py from distutils.command.clean import clean from distutils.command.sdist import sdist from distutils.core import", "command to delete build and schema files\"\"\" description = \"Custom clean command; remove", "setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog", "it's there with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements =", "'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ #", ":: Markup :: XML', ] LONG_DESCRIPTION = None try: # read the description", "'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml':", "cleanup command to delete build and schema files\"\"\" description = \"Custom clean command;", "class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and schema files\"\"\" description =", "documentation on readthedocs.org # unittest2 should only be included for py2.6 if sys.version_info", "'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language ::", ":: Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent',", "'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming", "'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming", "Processing :: Markup :: XML', ] LONG_DESCRIPTION = None try: # read the", "'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include schema catalog", ":: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language ::", ":: 3.4', 
'Programming Language :: Python :: 3.5', 'Topic :: Software Development ::", "'dev': dev_requirements }, package_data={'eulxml': [ # include schema catalog and all downloaded schemas", "schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with", ":: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python ::", "and lextab modules.\"\"\" def run(self): # importing this forces ply to generate parsetab/lextab", "for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean':", "setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__,", ":: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License ::", "Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python", "'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries ::", "eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django form support', long_description=LONG_DESCRIPTION, classifiers=CLASSIFIERS, )", ":: Text Processing :: Markup :: XML', ] LONG_DESCRIPTION = None try: #", "= [ 'Development Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience", "method to check if catalog is present, and generate if not if not", "data and then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass", "from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to check if", "shutil from setuptools import setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command", "generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete 
build", "sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog,", "options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog", "Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language", "3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries", "\"\"\"Setup.py for eulxml package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import clean from", "build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog is included.\"\"\" def", "version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[", "eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to", "3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4',", "build and schema files\"\"\" description = \"Custom clean command; remove schema files and", "is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5", "'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include schema catalog and all", "'%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django form support', long_description=LONG_DESCRIPTION,", "and schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize", "description = \"Custom clean command; remove schema files and XML catalog\" def run(self):", "\"\"\"Extend 
sdist command to ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self)", "LONG_DESCRIPTION = None try: # read the description if it's there with open('README.rst')", "parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema", "'Topic :: Text Processing :: Markup :: XML', ] LONG_DESCRIPTION = None try:", "import sdist from distutils.core import Command import os import sys import shutil from", "run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Framework", "Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved ::", "normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to", "for generating documentation on readthedocs.org # unittest2 should only be included for py2.6", "], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include", "'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural", ":: Python :: 3.5', 'Topic :: Software Development :: Libraries :: Python Modules',", "#!/usr/bin/env python \"\"\"Setup.py for eulxml package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import", "catalog and schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self):", "print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and", "Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python", "Text Processing :: Markup :: XML', ] LONG_DESCRIPTION = None try: # read", "import sys import shutil from setuptools import 
setup, find_packages import eulxml class GenerateXmlCatalog(Command):", "finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed():", "from distutils.command.build_py import build_py from distutils.command.clean import clean from distutils.command.sdist import sdist from", "eulxml package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import clean from distutils.command.sdist import", "packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf':", "= \"Custom clean command; remove schema files and XML catalog\" def run(self): #", "'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(),", "['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include schema catalog and", "schema catalog and all downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]},", "extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include schema", "pass def run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method", "pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog generate_catalog()", "import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog and schemas'''", "def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from", "distutils.command.sdist import sdist from distutils.core import 
Command import os import sys import shutil", ":: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2',", "command to generate fresh catalog and schemas''' user_options = [] def initialize_options(self): \"\"\"init", ":: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language ::", "}, package_data={'eulxml': [ # include schema catalog and all downloaded schemas in the", "eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog and schemas''' user_options", "included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5 -", "files and XML catalog\" def run(self): # remove schema data and then do", "cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>',", "2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3',", "dev_requirements }, package_data={'eulxml': [ # include schema catalog and all downloaded schemas in", "build_py from distutils.command.clean import clean from distutils.command.sdist import sdist from distutils.core import Command", "'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>',", "'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version", "url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10',", "Modules', 'Topic :: Text Processing :: Markup :: XML', ] LONG_DESCRIPTION = None", "Python 
:: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python", "'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming", "2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'],", "['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [ # include schema catalog and all downloaded", "generate_catalog_if_needed(): # helper method to check if catalog is present, and generate if", "pip-dev-req.txt # for generating documentation on readthedocs.org # unittest2 should only be included", "schema files and XML catalog\" def run(self): # remove schema data and then", "dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml',", "on readthedocs.org # unittest2 should only be included for py2.6 if sys.version_info <", "install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements },", "'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing", "to generate parsetab and lextab modules.\"\"\" def run(self): # importing this forces ply", "NOTE: dev requirements should be duplicated in pip-dev-req.txt # for generating documentation on", "should only be included for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup(", "import os import sys import shutil from setuptools import setup, find_packages import eulxml", "if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog()", ":: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing ::", "catalog is present, and generate if not if not 
os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import", "command; remove schema files and XML catalog\" def run(self): # remove schema data", "GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[", "except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab", ":: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language", "GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog and schemas''' user_options = []", "5 - Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI", "clean command; remove schema files and XML catalog\" def run(self): # remove schema", "\"\"\"Use ply to generate parsetab and lextab modules.\"\"\" def run(self): # importing this", "lextab modules.\"\"\" def run(self): # importing this forces ply to generate parsetab/lextab import", "ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command", "Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language", "'Development Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers',", "] # NOTE: dev requirements should be duplicated in pip-dev-req.txt # for generating", "dev requirements should be duplicated in pip-dev-req.txt # for generating documentation on readthedocs.org", "License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={", "# NOTE: dev requirements should be duplicated in pip-dev-req.txt # for generating documentation", "'''Custom setup command to generate fresh catalog 
and schemas''' user_options = [] def", ":: 3.5', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic ::", "dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] #", "if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean):", "'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic", "Python :: 3.5', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic", "not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom", "\"Custom clean command; remove schema files and XML catalog\" def run(self): # remove", "be duplicated in pip-dev-req.txt # for generating documentation on readthedocs.org # unittest2 should", "and then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self)", "< (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog':", "to generate fresh catalog and schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\"", "from distutils.core import Command import os import sys import shutil from setuptools import", "Markup :: XML', ] LONG_DESCRIPTION = None try: # read the description if", "Libraries :: Python Modules', 'Topic :: Text Processing :: Markup :: XML', ]", "Command import os import sys import shutil from setuptools import setup, find_packages import", "import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to check if catalog is", "catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status ::", "= None 
try: # read the description if it's there with open('README.rst') as", "LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock',", "cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate", "with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5',", "'requests', ] # NOTE: dev requirements should be duplicated in pip-dev-req.txt # for", "Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming", "to check if catalog is present, and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE):", "options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper", "clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab modules.\"\"\" def run(self):", "Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development", "parsetab and lextab modules.\"\"\" def run(self): # importing this forces ply to generate", "importing this forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist):", "if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist':", "XML', ] LONG_DESCRIPTION = None try: # read the description if it's there", "desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox',", "name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ],", ":: Python 
:: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language ::", "2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7',", "ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development", "to ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [", "}, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8',", "os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup", "os import sys import shutil from setuptools import setup, find_packages import eulxml class", "'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache", "'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements }, package_data={'eulxml': [", "try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab", "# include schema catalog and all downloaded schemas in the package '%s/*' %", "- Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved", "class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog and schemas''' user_options =", "read the description if it's there with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read()", "files\"\"\" description = \"Custom clean command; remove schema files and XML catalog\" def", "distutils.command.build_py import build_py from distutils.command.clean import clean from distutils.command.sdist import sdist from distutils.core", 
"sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Framework :: Django',", "package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import clean from distutils.command.sdist import sdist", "# unittest2 should only be included for py2.6 if sys.version_info < (2, 7):", "CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License,", "catalog and all downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based", "eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to check if catalog", "find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh catalog and", "shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and", "def run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to", "class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab modules.\"\"\" def run(self): #", "there with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [", "XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and schema", "Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License", "generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure", "'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python", "sys import shutil from setuptools import setup, 
find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom", "package_data={'eulxml': [ # include schema catalog and all downloaded schemas in the package", "to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to", "sdist command to ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS", "author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4',", "forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist", "Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic ::", "(2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog", "'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # NOTE: dev requirements should be duplicated", "# importing this forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class", "generate fresh catalog and schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass", "schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status", "SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog is included.\"\"\" def run(self): generate_catalog_if_needed()", "all downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data", "license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 
'six>=1.10', ],", "Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup ::", "'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming", "SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0',", "Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python", "OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming", ":: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python ::", "in the package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django", "run(self): # remove schema data and then do any other normal cleaning try:", "fresh catalog and schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass def", "generate parsetab and lextab modules.\"\"\" def run(self): # importing this forces ply to", "Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language", ":: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python ::", "Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries :: Python", "if catalog is present, and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog", "other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply", "Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django':", "[ # include schema catalog and all downloaded schemas in the package '%s/*'", "setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 
'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'],", "this forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend", "generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\")", "helper method to check if catalog is present, and generate if not if", "only be included for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={", "ply to generate parsetab and lextab modules.\"\"\" def run(self): # importing this forces", "modules.\"\"\" def run(self): # importing this forces ply to generate parsetab/lextab import eulxml.xpath.core", "included for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly,", "catalog\" def run(self): # remove schema data and then do any other normal", ":: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System", "pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ]", "\"\"\"Custom cleanup command to delete build and schema files\"\"\" description = \"Custom clean", ":: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup :: XML',", "import Command import os import sys import shutil from setuptools import setup, find_packages", "CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and schema files\"\"\" description = \"Custom", "schemas''' user_options = [] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\"", "def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog generate_catalog() def", ":: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python ::", "from eulxml.catalog import 
generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command", "Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python", "[ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # NOTE: dev", "for eulxml package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import clean from distutils.command.sdist", "desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0',", "remove schema data and then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except", ":: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language ::", "Approved :: Apache Software License', 'Natural Language :: English', 'Operating System :: OS", "and XML catalog\" def run(self): # remove schema data and then do any", "initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog", "'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache", ":: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python ::", "generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and schema files\"\"\" description", "import setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate fresh", "BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab modules.\"\"\" def run(self): # importing", "run(self): # importing this forces ply to generate parsetab/lextab import eulxml.xpath.core generate_catalog_if_needed() build_py.run(self)", "schema data and 
then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError:", "unittest2 should only be included for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2')", "duplicated in pip-dev-req.txt # for generating documentation on readthedocs.org # unittest2 should only", ":: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language ::", ":: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python ::", "schema files\"\"\" description = \"Custom clean command; remove schema files and XML catalog\"", "'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming", "3.5', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text", "import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete", "'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python',", "License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language ::", "package '%s/*' % eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django form support',", "present, and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating", "Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License',", "generate_catalog() def generate_catalog_if_needed(): # helper method to check if catalog is present, and", "Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language", "BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog }, name='eulxml', version=eulxml.__version__, author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml',", "# read the description if it's there with 
open('README.rst') as desc_f: LONG_DESCRIPTION =", "setuptools import setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to generate", ":: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language ::", "None try: # read the description if it's there with open('README.rst') as desc_f:", "\"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): #", "'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # NOTE: dev requirements", "Python Modules', 'Topic :: Text Processing :: Markup :: XML', ] LONG_DESCRIPTION =", "'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating", "'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # NOTE: dev requirements should", "English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language ::", "from distutils.command.sdist import sdist from distutils.core import Command import os import sys import", "do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py):", "], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev': dev_requirements", "open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage',", "include schema catalog and all downloaded schemas in the package '%s/*' % eulxml.SCHEMA_DATA_DIR", "[] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self):", "def run(self): generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable',", "readthedocs.org # unittest2 should only be included 
for py2.6 if sys.version_info < (2,", "Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup", "3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5',", "any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR) except OSError: pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use", "distutils.core import Command import os import sys import shutil from setuptools import setup,", "XML catalog\" def run(self): # remove schema data and then do any other", ":: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software", "delete build and schema files\"\"\" description = \"Custom clean command; remove schema files", "2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3',", "= [] def initialize_options(self): \"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def", "Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language", ":: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software", "generate_catalog_if_needed() sdist.run(self) CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Framework ::", ":: XML', ] LONG_DESCRIPTION = None try: # read the description if it's", "# helper method to check if catalog is present, and generate if not", "not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog import generate_catalog print(\"Cenerating XML catalog...\") generate_catalog() class", "'ply>=3.8', ], install_requires=[ 'ply>=3.8', 'lxml>=3.4', 'six>=1.10', ], extras_require={ 'django': ['Django<1.9'], 'rdf': ['rdflib>=3.0'], 'dev':", "# remove schema data and then do any other normal cleaning try: shutil.rmtree(eulxml.XMLCATALOG_DIR)", "def run(self): # remove schema data and then do any other normal cleaning", "is present, and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from eulxml.catalog 
import generate_catalog", "clean from distutils.command.sdist import sdist from distutils.core import Command import os import sys", "catalog...\") generate_catalog() class CleanSchemaData(clean): \"\"\"Custom cleanup command to delete build and schema files\"\"\"", "generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to check if catalog is present,", "= [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # NOTE:", "% eulxml.SCHEMA_DATA_DIR ]}, description='XPath-based XML data binding, with Django form support', long_description=LONG_DESCRIPTION, classifiers=CLASSIFIERS,", "pass clean.run(self) class BuildPyWithPly(build_py): \"\"\"Use ply to generate parsetab and lextab modules.\"\"\" def", "from distutils.command.clean import clean from distutils.command.sdist import sdist from distutils.core import Command import", "generating documentation on readthedocs.org # unittest2 should only be included for py2.6 if", "author='<NAME>', author_email='<EMAIL>', url='https://github.com/emory-libraries/eulxml', license='Apache License, Version 2.0', packages=find_packages(), setup_requires=[ 'ply>=3.8', ], install_requires=[ 'ply>=3.8',", "generate_catalog_if_needed() build_py.run(self) class SdistWithCatalog(sdist): \"\"\"Extend sdist command to ensure schema catalog is included.\"\"\"", "'mock', 'nose', 'tox', 'requests', ] # NOTE: dev requirements should be duplicated in", "the description if it's there with open('README.rst') as desc_f: LONG_DESCRIPTION = desc_f.read() except:", "import shutil from setuptools import setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup", "distutils.command.clean import clean from distutils.command.sdist import sdist from distutils.core import Command import os", "CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended", "'Django<1.9', 'rdflib>=3.0', 'mock', 'nose', 'tox', 'requests', ] # 
NOTE: dev requirements should be", "= desc_f.read() except: pass dev_requirements = [ 'sphinx>=1.3.5', 'coverage', 'Django<1.9', 'rdflib>=3.0', 'mock', 'nose',", "\"\"\"init options\"\"\" pass def finalize_options(self): \"\"\"finalize options\"\"\" pass def run(self): from eulxml.catalog import", "run(self): from eulxml.catalog import generate_catalog generate_catalog() def generate_catalog_if_needed(): # helper method to check", "be included for py2.6 if sys.version_info < (2, 7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py':", "7): dev_requirements.append('unittest2') setup( cmdclass={ 'build_py': BuildPyWithPly, 'clean': CleanSchemaData, 'sdist': SdistWithCatalog, 'xmlcatalog': GenerateXmlCatalog },", ":: Python Modules', 'Topic :: Text Processing :: Markup :: XML', ] LONG_DESCRIPTION", "from setuptools import setup, find_packages import eulxml class GenerateXmlCatalog(Command): '''Custom setup command to", "# for generating documentation on readthedocs.org # unittest2 should only be included for", "[ 'Development Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience ::", "python \"\"\"Setup.py for eulxml package\"\"\" from distutils.command.build_py import build_py from distutils.command.clean import clean", "check if catalog is present, and generate if not if not os.path.exists(eulxml.XMLCATALOG_FILE): from", "Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language", "OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System ::", "'nose', 'tox', 'requests', ] # NOTE: dev requirements should be duplicated in pip-dev-req.txt" ]
[ "id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) #", "import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api):", "setup.create_state_obj(s, config=s.configC1) # need this to log if create_state_obj set 'log=True', otherwise this", "need this to log if create_state_obj set 'log=True', otherwise this will # do", "= setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list)", "target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert api.domain == domain assert len(api.email)", "cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError # record is already up:", "up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2)", "as file: lines = file.read().splitlines() domain = None for l in lines: m", "sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog,", "domain domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog =", "= file.read().splitlines() domain = None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l)", "lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return domain def", "setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa)", "not up: Except.DNSNotLive assert len(records211) == 1 assert hash211 in records211 assert len(records311)", "from time import sleep from alnitak import config from alnitak.api import 
cloudflare from", "Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1)", "# need the domain domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api,", "record is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 =", "from alnitak import prog as Prog from alnitak import exceptions as Except @pytest.fixture(scope=\"module\")", "1 target = prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in target.tlsa assert", "id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api,", "re from pathlib import Path from time import sleep from alnitak import config", "file: lines = file.read().splitlines() domain = None for l in lines: m =", "cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError #", "tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api,", "= cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError", "len(api.email) > 0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311]", "cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError # record is not up: Except.DNSNotLive", "domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s,", "up: Except.DNSNotLive assert len(records211) == 1 assert hash211 in records211 assert len(records311) ==", "= None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain", 
"assert len(records211) == 1 assert hash211 in records211 assert len(records311) == 1 assert", "assert t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 =", "def get_domain(api_path): with open(str(api_path), 'r') as file: lines = file.read().splitlines() domain = None", "cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex:", "get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) #", "assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need", "set 'log=True', otherwise this will # do nothing. with prog.log: retval = config.read(prog)", "records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered:", "== Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp',", "pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa2)", "log if create_state_obj set 'log=True', otherwise this will # do nothing. 
with prog.log:", "= cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError # record is not up:", "1 assert hash311 in records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog,", "import sleep from alnitak import config from alnitak.api import cloudflare from alnitak.tests import", "True return False def get_domain(api_path): with open(str(api_path), 'r') as file: lines = file.read().splitlines()", "assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311',", "import config from alnitak.api import cloudflare from alnitak.tests import setup from alnitak import", "otherwise this will # do nothing. with prog.log: retval = config.read(prog) assert retval", "in target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api", "s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need this to", "with open(str(api_path), 'r') as file: lines = file.read().splitlines() domain = None for l", "pytest.skip(\"no cloudflare.api file\") # need the domain domain = get_domain(cloudflare_api) assert domain s", "= target.tlsa[1] api = target.api assert api.domain == domain assert len(api.email) > 0", "records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError # record is not", "sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError", "if m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no", "assert len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1", "hash311) # error encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3) records211", 
"assert len(records311) == 1 assert hash311 in records311 id211 = records211[hash211] id311 =", "setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) ==", "domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1 target =", "assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1,", "= m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") #", "if create_state_obj set 'log=True', otherwise this will # do nothing. with prog.log: retval", "2 assert t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2", "t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert", "return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the", "alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def", "the domain domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog", "api = target.api assert api.domain == domain assert len(api.email) > 0 assert len(api.key)", "s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need this to log if create_state_obj", "cloudflare from alnitak.tests import setup from alnitak import prog as Prog from alnitak", "cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError #", "from alnitak.api import cloudflare from alnitak.tests import setup from alnitak import prog as", "encountered: Except.DNSProcessingError 
# record is not up: Except.DNSNotLive assert len(records211) == 1 assert", "lines = file.read().splitlines() domain = None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$',", "encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive) as", "pathlib import Path from time import sleep from alnitak import config from alnitak.api", "sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error", "records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered:", "file.read().splitlines() domain = None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if", "= setup.create_state_obj(s, config=s.configC1) # need this to log if create_state_obj set 'log=True', otherwise", "1 assert hash211 in records211 assert len(records311) == 1 assert hash311 in records311", "alnitak import prog as Prog from alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def", "import cloudflare from alnitak.tests import setup from alnitak import prog as Prog from", "as Prog from alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname)", "0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api,", "with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api,", "domain) assert len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa) == 2 assert", "target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api =", "= target.api assert api.domain == domain assert len(api.email) > 0 assert len(api.key) >", 
"encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api,", "nothing. with prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211',", "target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert api.domain ==", "tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive)", "len(target.tlsa) == 2 assert t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1 =", "if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain domain = get_domain(cloudflare_api)", "def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return", "prog = setup.create_state_obj(s, config=s.configC1) # need this to log if create_state_obj set 'log=True',", "config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 =", "# do nothing. 
with prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2", "# error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with", "Path from time import sleep from alnitak import config from alnitak.api import cloudflare", "== 1 target = prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in target.tlsa", "== 1 assert hash311 in records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3)", "error encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog,", "return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def", "target.tlsa[1] api = target.api assert api.domain == domain assert len(api.email) > 0 assert", "tlsa2, hash311) # error encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3)", "m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api):", "alnitak.api import cloudflare from alnitak.tests import setup from alnitak import prog as Prog", "len(records211) == 1 assert hash211 in records211 assert len(records311) == 1 assert hash311", "need the domain domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain)", "s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError", "already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api,", "api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with", "domain = 
m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\")", "alnitak.tests import setup from alnitak import prog as Prog from alnitak import exceptions", "domain assert len(api.email) > 0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311", "'tcp', domain) assert len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa) == 2", "api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain domain = get_domain(cloudflare_api) assert domain", "tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert api.domain == domain", "file\") # need the domain domain = get_domain(cloudflare_api) assert domain s = setup.Init(keep=True)", "/ 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def get_domain(api_path): with", "domain = None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m:", "== 1 assert hash211 in records211 assert len(records311) == 1 assert hash311 in", "time import sleep from alnitak import config from alnitak.api import cloudflare from alnitak.tests", "cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False", "return False def get_domain(api_path): with open(str(api_path), 'r') as file: lines = file.read().splitlines() domain", "l) if m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api):", "m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need", "from alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api'", "prog as Prog from 
alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return", "domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain", "t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1]", "id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as", "id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1)", "not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain domain = get_domain(cloudflare_api) assert", "# error encountered: Except.DNSProcessingError # record is not up: Except.DNSNotLive assert len(records211) ==", "get_domain(api_path): with open(str(api_path), 'r') as file: lines = file.read().splitlines() domain = None for", "error encountered: Except.DNSProcessingError # record is not up: Except.DNSNotLive assert len(records211) == 1", "def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def get_domain(api_path): with open(str(api_path), 'r')", "None for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain =", "= prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in target.tlsa assert t_a2 in", "= records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error", "with prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527',", "to log if create_state_obj set 'log=True', otherwise this will # do nothing. 
with", "l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return", "Prog from alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) /", "= setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need this to log", "s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) #", "hash211 in records211 assert len(records311) == 1 assert hash311 in records311 id211 =", "will # do nothing. with prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok", "= target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert api.domain == domain assert", "import prog as Prog from alnitak import exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request):", "hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError # record is already", "= setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1 target = prog.target_list[0] assert", "import re from pathlib import Path from time import sleep from alnitak import", "setup from alnitak import prog as Prog from alnitak import exceptions as Except", "open(str(api_path), 'r') as file: lines = file.read().splitlines() domain = None for l in", "cloudflare.api file\") # need the domain domain = get_domain(cloudflare_api) assert domain s =", "Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain)", "tlsa2) # error encountered: Except.DNSProcessingError # record is not up: Except.DNSNotLive assert len(records211)", "pytest import re from pathlib import Path from time import sleep from alnitak", "# need this to log if create_state_obj set 
'log=True', otherwise this will #", "0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api,", "from alnitak.tests import setup from alnitak import prog as Prog from alnitak import", "records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311)", "= s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered:", "'r') as file: lines = file.read().splitlines() domain = None for l in lines:", "is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog,", "def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain domain", "from alnitak import config from alnitak.api import cloudflare from alnitak.tests import setup from", "api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError # record", "retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain)", "if cloudflare_api.exists(): return True return False def get_domain(api_path): with open(str(api_path), 'r') as file:", "t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert", "assert t_a2 in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api", "hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2,", "assert hash311 in records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api,", "tlsa1, hash211) 
cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError # record is", "m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api", "hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error", "== domain assert len(api.email) > 0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211]", "= config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1", "# error encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing sleep(3) records211 =", "Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def get_domain(api_path):", "api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311) # error encountered: Except.DNSProcessingError # record", "this to log if create_state_obj set 'log=True', otherwise this will # do nothing.", "= re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api): if", "in records211 assert len(records311) == 1 assert hash311 in records311 id211 = records211[hash211]", "assert len(api.email) > 0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 =", "records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog,", "False def get_domain(api_path): with open(str(api_path), 'r') as file: lines = file.read().splitlines() domain =", "Except.DNSProcessingError # record is not up: Except.DNSNotLive assert len(records211) == 1 assert hash211", "> 0 assert len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] 
cloudflare.api_publish(prog,", "assert api.domain == domain assert len(api.email) > 0 assert len(api.key) > 0 hash211", "cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3)", "test_cloudflare(cloudflare_api): if not api_file_exists(cloudflare_api): pytest.skip(\"no cloudflare.api file\") # need the domain domain =", "tlsa2 = target.tlsa[1] api = target.api assert api.domain == domain assert len(api.email) >", "is not up: Except.DNSNotLive assert len(records211) == 1 assert hash211 in records211 assert", "target = prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in target.tlsa assert t_a2", "import pytest import re from pathlib import Path from time import sleep from", "in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return domain", "= s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog, api, tlsa2, hash311)", "prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp',", "'53527', 'tcp', domain) assert len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa) ==", "'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def get_domain(api_path): with open(str(api_path),", "domain) prog = setup.create_state_obj(s, config=s.configC1) # need this to log if create_state_obj set", "Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return", "domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need this", "api, tlsa2) # error encountered: 
Except.DNSProcessingError # record is not up: Except.DNSNotLive assert", "@pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True", "len(records311) == 1 assert hash311 in records311 id211 = records211[hash211] id311 = records311[hash311]", "do nothing. with prog.log: retval = config.read(prog) assert retval == Prog.RetVal.ok t_a2 =", "from pathlib import Path from time import sleep from alnitak import config from", "api.domain == domain assert len(api.email) > 0 assert len(api.key) > 0 hash211 =", "setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1) # need this to log if", "return True return False def get_domain(api_path): with open(str(api_path), 'r') as file: lines =", "Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) #", "api, tlsa2, id311) # error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog,", "for l in lines: m = re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain')", "records211 assert len(records311) == 1 assert hash311 in records311 id211 = records211[hash211] id311", "Except.DNSNotLive assert len(records211) == 1 assert hash211 in records211 assert len(records311) == 1", "config=s.configC1) # need this to log if create_state_obj set 'log=True', otherwise this will", "Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive) as ex:", "'53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1", "this will # do nothing. 
with prog.log: retval = config.read(prog) assert retval ==", "alnitak import config from alnitak.api import cloudflare from alnitak.tests import setup from alnitak", "retval == Prog.RetVal.ok t_a2 = setup.create_tlsa_obj('211', '53527', 'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527',", "error encountered: Except.DNSProcessingError sleep(3) with pytest.raises(Except.DNSNotLive) as ex: cloudflare.api_read(prog, api, tlsa1) with pytest.raises(Except.DNSNotLive)", "> 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211) cloudflare.api_publish(prog,", "'log=True', otherwise this will # do nothing. with prog.log: retval = config.read(prog) assert", "# record is not up: Except.DNSNotLive assert len(records211) == 1 assert hash211 in", "hash311 in records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1,", "assert len(target.tlsa) == 2 assert t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1", "import Path from time import sleep from alnitak import config from alnitak.api import", "len(prog.target_list) == 1 target = prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in", "== 2 assert t_a1 in target.tlsa assert t_a2 in target.tlsa tlsa1 = target.tlsa[0]", "prog.target_list[0] assert len(target.tlsa) == 2 assert t_a1 in target.tlsa assert t_a2 in target.tlsa", "target.api assert api.domain == domain assert len(api.email) > 0 assert len(api.key) > 0", "cloudflare_api.exists(): return True return False def get_domain(api_path): with open(str(api_path), 'r') as file: lines", "re.match(r'\\s*#.*domain:\\s*(?P<domain>\\S+)\\s*$', l) if m: domain = m.group('domain') return domain def test_cloudflare(cloudflare_api): if not", "# record is already up: Except.DNSSkipProcessing sleep(3) records211 = cloudflare.api_read(prog, api, tlsa1) records311", "sleep from alnitak import config from alnitak.api import cloudflare from 
alnitak.tests import setup", "as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if cloudflare_api.exists():", "len(api.key) > 0 hash211 = s.hash['a.com']['cert1'][211] hash311 = s.hash['a.com']['cert1'][311] cloudflare.api_publish(prog, api, tlsa1, hash211)", "in records311 id211 = records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211)", "= get_domain(cloudflare_api) assert domain s = setup.Init(keep=True) s.create_cloudflare_config(cloudflare_api, domain) prog = setup.create_state_obj(s, config=s.configC1)", "in target.tlsa tlsa1 = target.tlsa[0] tlsa2 = target.tlsa[1] api = target.api assert api.domain", "assert hash211 in records211 assert len(records311) == 1 assert hash311 in records311 id211", "= records211[hash211] id311 = records311[hash311] sleep(3) cloudflare.api_delete(prog, api, tlsa1, id211) cloudflare.api_delete(prog, api, tlsa2,", "config from alnitak.api import cloudflare from alnitak.tests import setup from alnitak import prog", "api, tlsa2, hash311) # error encountered: Except.DNSProcessingError # record is already up: Except.DNSSkipProcessing", "tlsa1) records311 = cloudflare.api_read(prog, api, tlsa2) # error encountered: Except.DNSProcessingError # record is", "create_state_obj set 'log=True', otherwise this will # do nothing. 
with prog.log: retval =", "record is not up: Except.DNSNotLive assert len(records211) == 1 assert hash211 in records211", "import setup from alnitak import prog as Prog from alnitak import exceptions as", "api_file_exists(cloudflare_api): if cloudflare_api.exists(): return True return False def get_domain(api_path): with open(str(api_path), 'r') as", "t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1 target = prog.target_list[0]", "exceptions as Except @pytest.fixture(scope=\"module\") def cloudflare_api(request): return Path(request.fspath.dirname) / 'cloudflare.api' def api_file_exists(cloudflare_api): if", "'tcp', domain) t_a1 = setup.create_tlsa_obj('311', '53527', 'tcp', domain) assert len(prog.target_list) == 1 target" ]
[]
[ "\"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for", "in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {} again", "if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not p.find_all(\"input\") and not", "for dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6],", "import config from Kite import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d]", "!= 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date", "dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for", "format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self):", "minutes, \" \"and the failed url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS,", "-1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"],", "file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {} minutes, \" \"and the", "{} is {} ... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list", "+ 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url)", "article] def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情", "1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"]))", "logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result while article", ".1 result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if self.terminated_amount", "[date, article] def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10", "utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) # for qr in query_results: #", "!= \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data)", "a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass", "not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not", "__name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2016-04-15\", \"2020-12-03\")", "= utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list: if \"href\" in a.attrs", "\"Title\": a.string, 
\"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount =", "and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and", "result while article == \"\" and self.is_article_prob >= .1: self.is_article_prob -= .1 result", "bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list: if \"href\" in", "= \"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {}", "and not p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and", "# 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str", "url): pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") #", "\\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article", "in dates_separated_into_ranges_list: for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"))", "failed url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by", "%Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount", "while article == \"\" and self.is_article_prob >= .1: self.is_article_prob -= .1 result =", "than {} minutes, \" \"and the failed url has been written in path", "# # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0]", "and 
a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not", "Spyder from Kite import config from Kite import utils import time import logging", "time in database is {} ... \".format(latest_date_str)) # crawled_urls_list = list() # for", "qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned #", "a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") ==", "get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\")", "max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs =", "and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"): #", "\"\" for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\",", "if \"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) !=", "attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0] !=", "1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list", "self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url)", "0 def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except Exception: return False", "if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 
有返回但是article为null的情况 article_specific_date,", "== -1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result =", "get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except Exception: return False date =", "import Spyder from Kite import config from Kite import utils import time import", "\" \"and the failed url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH))", "crawled data from {} to {} is {} ... \".format(start_date, # latest_date_str, #", "p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\",", "a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount", "self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by", "... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date)", "-= .1 result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if", "extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0]", "\".format(latest_date_str)) # crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results", "# 有返回但是article为null的情况 article_specific_date, article = result while article == \"\" and self.is_article_prob >=", "latest_date_str): # query_results = self.query_news(\"Date\", _date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"])", "金融界:http://www.jrj.com.cn 股票频道全部新闻:http://stock.jrj.com.cn/xwk/202012/20201203_1.shtml \"\"\" import __init__ from spyder import Spyder from Kite import config", "article = result while article == \"\" and self.is_article_prob >= .1: self.is_article_prob -=", "a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1", "\"\") return [date, article] def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 #", "{} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result while article ==", "60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result:", "request {} again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount)", "self.query_news(\"Date\", _date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list =", "result = self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} 
{}\".format(a.string, a[\"href\"]))", "\"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1,", "{} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def", "logging.info(\"latest time in database is {} ... \".format(latest_date_str)) # crawled_urls_list = list() #", "self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob", "{}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result while article == \"\"", "+= 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file:", "dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for", "bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not p.find_all(\"input\") and", "logging.info(\"rejected by remote server, request {} again after \" \"{} seconds...\".format(a[\"href\"], 60 *", "\")[0] # else: # latest_date_str = start_date # logging.info(\"latest time in database is", "# crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results =", "# logging.info(\"latest time in database is {} ... 
\".format(latest_date_str)) # crawled_urls_list = list()", "date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date == \"\": date = specific_date", "datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col =", "article = result self.is_article_prob = .5 if article != \"\": data = {\"Date\":", "latest_date_str = start_date # logging.info(\"latest time in database is {} ... \".format(latest_date_str)) #", "and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\")", "not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\",", "= list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in", "not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"): # if", "not p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs", "date = \"\" for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date =", "from Kite import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',", "= span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date == \"\": date = specific_date article", "open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {} minutes,", "_date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) # for qr in", "in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) # for 
qr in query_results:", "utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\",", "\"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article]", "path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {} again after", "{} again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result", "in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num))", "longer than {} minutes, \" \"and the failed url has been written in", "\" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date)", "if article != \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\":", "# 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) !=", "self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况", "for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) # for qr", "# 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\")", "remote server, request {} again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60", "= .5 if article != \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\":", "爬取失败的情况 logging.info(\"[FAILED] {} 
{}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result while", "from {} to {} is {} ... \".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list", "\"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if", "length of crawled data from {} to {} is {} ... \".format(start_date, #", "not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not p.find_all(\"input\") and not p.find_all(\"a\",", "\"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if", "article_specific_date, article = result while article == \"\" and self.is_article_prob >= .1: self.is_article_prob", "\"\"\" import __init__ from spyder import Spyder from Kite import config from Kite", "p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return", "crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled", "self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob = .5 if article != \"\":", "date == \"\": date = specific_date article = \"\" for p in bs.find_all(\"p\"):", "not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\")", "# # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍", "\"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # 
jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2016-04-15\", \"2020-12-03\") # TODO:继续爬取RECORD_JRJ_FAILED_URL_TXT_FILE_PATH文件中失败的URL", "not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article", "Kite import config from Kite import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s", "def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 #", "not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and \\", "\"\") break if date == \"\": date = specific_date article = \"\" for", "= 0 def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except Exception: return", "in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the", "%H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount =", "dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url", "__init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url,", "\"and the failed url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break", "def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self,", "and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): 
article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\",", "a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in", "= \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in", "def get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\",", "a_list = bs.find_all(\"a\") for a in a_list: if \"href\" in a.attrs and a.string", "not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1", "{}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ,", "break if date == \"\": date = specific_date article = \"\" for p", "\"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\"", "jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2016-04-15\", \"2020-12-03\") # TODO:继续爬取RECORD_JRJ_FAILED_URL_TXT_FILE_PATH文件中失败的URL pass", "if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and \\ # not p.find_all(\"input\")", "list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) #", "num in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"),", "a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} 
{}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0", "date) for num in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6],", "specific_date article = \"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs", "= specific_date article = \"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and", "data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {}", "self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else:", ".5 if article != \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string,", "\"\": date = specific_date article = \"\" for p in bs.find_all(\"p\"): if not", "not result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH,", "date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url,", "\\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list: #", "data from {} to {} is {} ... \".format(start_date, # latest_date_str, # len(crawled_urls_list)))", "and p.attrs == {} and \\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\":", "pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ,", "logging.info(\"the length of crawled data from {} to {} is {} ... 
\".format(start_date,", "p.attrs == {} and \\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"})", "a in a_list: if \"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4],", "%b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ)", "else: # 有返回但是article为null的情况 article_specific_date, article = result while article == \"\" and self.is_article_prob", "import __init__ from spyder import Spyder from Kite import config from Kite import", "by remote server, request {} again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount))", "first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num", "if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str", "\"jrj_final_daohang_start1\" and p.attrs == {} and \\ # not p.find_all(\"input\") and not p.find_all(\"a\",", "config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\",", "\"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS]", "seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article", "result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article =", ".1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount +=", "article += p.text.replace(\"\\r\", 
\"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self, url, start_date,", "\"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {} minutes, \"", "p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self, url,", "date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况", "bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date", "written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {}", "\"\" and self.is_article_prob >= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while", "= self.query_news(\"Date\", _date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list", "logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder):", "utils.html_parser(url) except Exception: return False date = \"\" for span in bs.find_all(\"span\"): if", "dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\",", "and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def", "specific_date): try: bs = utils.html_parser(url) except Exception: return False date = \"\" for", "is {} ... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list =", "date = specific_date article = \"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\")", "max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num + 1): _url =", "\"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date,", "while not result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with", "return False date = \"\" for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\":", "from spyder import Spyder from Kite import config from Kite import utils import", "utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date in", "and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount", "__init__ from spyder import Spyder from Kite import config from Kite import utils", "config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {} again after \" \"{} seconds...\".format(a[\"href\"],", "\"\").replace(\"\\n\", \"\") break if date == \"\": date = specific_date article = \"\"", "%(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col", "# 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) #", "15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 
开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list =", "-1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while not result:", "a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result while article == \"\" and", "始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than", "for a in a_list: if \"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\",", "-1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and", "= max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date # logging.info(\"latest time in", "self.is_article_prob = .5 if article != \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"],", "{\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date,", "self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as", "time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S')", "import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d", "p.attrs == {} and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and", "crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled data from {}", "\"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def 
get_historical_news(self, url, start_date, end_date): # #", "end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # #", "\"\"\" 金融界:http://www.jrj.com.cn 股票频道全部新闻:http://stock.jrj.com.cn/xwk/202012/20201203_1.shtml \"\"\" import __init__ from spyder import Spyder from Kite import", "# else: # latest_date_str = start_date # logging.info(\"latest time in database is {}", "super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date):", "# len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE)", "list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list:", ">= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount", "len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str =", "url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote", "utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b", "bs.find_all(\"a\") for a in a_list: if \"href\" in a.attrs and a.string and \\", "again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result =", "span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = 
span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break", "= start_date # logging.info(\"latest time in database is {} ... \".format(latest_date_str)) # crawled_urls_list", "{} and \\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not", "return [date, article] def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # #", "{}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {} again after \"", "for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\", "# 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 #", "not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self,", "作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0:", "date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a", "database is {} ... 
\".format(latest_date_str)) # crawled_urls_list = list() # for _date in", "{} minutes, \" \"and the failed url has been written in path {}\"", "date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list:", "\"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date == \"\": date =", "span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date == \"\": date = specific_date article =", "config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server", ".format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request {} again after \" \"{}", "_url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list =", "{} ... \".format(latest_date_str)) # crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str):", "latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list,", "# crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled data from", "= list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date)", "# abandoned # logging.info(\"the length of crawled data from {} to {} is", "== \"__main__\": jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2016-04-15\", \"2020-12-03\") #", "of crawled data from {} to {} is {} ... 
\".format(start_date, # latest_date_str,", "如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") ==", "+= p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self, url, start_date, end_date):", "config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except", "\"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and", "article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string,", "file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {} minutes, \" \"and the failed", "有返回但是article为null的情况 article_specific_date, article = result while article == \"\" and self.is_article_prob >= .1:", "self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: #", "* self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result", "and \\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"):", "== \"\": date = specific_date article = \"\" for p in bs.find_all(\"p\"): if", "\"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list: if", "and p.attrs == {} and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"})", "self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string,", "# if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and \\ # not", "date) 
article_specific_date, article = result self.is_article_prob = .5 if article != \"\": data", "False date = \"\" for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date", "range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs", "== -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while not", "= self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else:", "start_date # logging.info(\"latest time in database is {} ... \".format(latest_date_str)) # crawled_urls_list =", "else: # latest_date_str = start_date # logging.info(\"latest time in database is {} ...", "# crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of", "p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self, url, start_date, end_date): #", "logging.info(\"rejected by remote server longer than {} minutes, \" \"and the failed url", "!= -1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1", "self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if", "p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"):", "to {} is {} ... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list()", "\"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num +", "and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list:", "# 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer", "# query_results = self.query_news(\"Date\", _date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) #", "class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0", "result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\")", "p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\":", "= self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled data from {} to", "= utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date", "self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1", "end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date in dates_range:", "import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y", 
"article_specific_date, article = result self.is_article_prob = .5 if article != \"\": data =", "bs = utils.html_parser(url) except Exception: return False date = \"\" for span in", "# if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else: #", "= self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS:", "has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server,", "\\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and", "query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length", "article == \"\" and self.is_article_prob >= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"],", "60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article =", "开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str =", "爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder", "== \"\" and self.is_article_prob >= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date)", "= {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {}", "= result while article == \"\" and self.is_article_prob >= .1: self.is_article_prob -= .1", "and self.is_article_prob >= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while not", "# # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 
比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定)", "try: bs = utils.html_parser(url) except Exception: return False date = \"\" for span", "{} and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(", "str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list: if \"href\"", "date) while not result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来", "> config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote", "the failed url has been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected", "from Kite import config from Kite import utils import time import logging logging.basicConfig(level=logging.INFO,", "\"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in", "= 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if __name__", "\"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num + 1): _url", "date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num + 1):", "= \"\" for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\",", "date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date) for num in range(1, max_pages_num", "* self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result: #", "if __name__ == \"__main__\": 
jrj_spyder = JrjSpyder() jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2017-05-06\", \"2018-01-01\") # jrj_spyder.get_historical_news(config.WEBSITES_LIST_TO_BE_CRAWLED_JRJ, \"2016-04-15\",", "<filename>src/Gon/jrj_spyder.py \"\"\" 金融界:http://www.jrj.com.cn 股票频道全部新闻:http://stock.jrj.com.cn/xwk/202012/20201203_1.shtml \"\"\" import __init__ from spyder import Spyder from Kite", "except Exception: return False date = \"\" for span in bs.find_all(\"span\"): if span.contents[0]", "p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs ==", "Exception: return False date = \"\" for span in bs.find_all(\"span\"): if span.contents[0] ==", "self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else:", "抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # #", "# for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] #", "%(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder,", "-1: result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if self.terminated_amount", "for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned", "a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成", "p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and \\ #", "== \"jrj_final_date_start\": date = span.text.replace(\"\\r\", 
\"\").replace(\"\\n\", \"\") break if date == \"\": date", "self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: #", "== {} and \\ not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not", "= utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url =", "in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and", "# # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list)", "in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if", "self.terminated_amount = 0 def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except Exception:", "0: # latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date #", "been written in path {}\" .format(config.JRJ_MAX_REJECTED_AMOUNTS, config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH)) break logging.info(\"rejected by remote server, request", "in a_list: if \"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\",", "latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date # logging.info(\"latest time", "== -1: result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if", "else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder =", "况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = 
self.extract_data([\"Date\"])[0] # if", "server, request {} again after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 *", "= self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob = .5 if article !=", "config from Kite import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s", "JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def", "# not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article +=", "result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob = .5 if article", "self).__init__() self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date): try:", "{} to {} is {} ... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list =", "len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for", "attrs={\"class\": \"red\"}) and not p.find_all(\"i\"): article += p.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\").replace(\"\\u3000\", \"\") return [date,", "股票频道全部新闻:http://stock.jrj.com.cn/xwk/202012/20201203_1.shtml \"\"\" import __init__ from spyder import Spyder from Kite import config from", "= utils.html_parser(url) except Exception: return False date = \"\" for span in bs.find_all(\"span\"):", "span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date == \"\":", "time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED]", "\"\").replace(\"\\u3000\", \"\") return [date, article] def get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取", "break logging.info(\"rejected by remote server, request {} again after \" \"{} seconds...\".format(a[\"href\"], 60", "{} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string))", "{} ... 
\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date,", "Kite import utils import time import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a,", "article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数", "# extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\"", "crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and \\", "start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 #", "for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num =", "== {} and \\ # not p.find_all(\"input\") and not p.find_all(\"a\", attrs={\"class\": \"red\"}) and", "self.col = self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date): try: bs", "url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍", "remote server longer than {} minutes, \" \"and the failed url has been", "seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not", "= self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: # latest_date_str = max(extracted_data_list).split(\" \")[0] #", "dates_separated_into_ranges_list: for date in dates_range: first_url 
= \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num", "= result self.is_article_prob = .5 if article != \"\": data = {\"Date\": article_specific_date,", "result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount += 1 if self.terminated_amount >", "if self.terminated_amount > config.JRJ_MAX_REJECTED_AMOUNTS: # 始终无法爬取的URL保存起来 with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected", "for num in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\",", "a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result", "_date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # # crawled_urls_list = self.extract_data([\"Url\"])[0]", "if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\") break if date ==", "比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # # 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list", "get_historical_news(self, url, start_date, end_date): # # 抽取数据库中已爬取的从start_date到latest_date_str所有新闻,避免重复爬取 # # 比如数据断断续续爬到了2016-10-10 15:00:00时间节点,但是在没调整参数的情 # #", "logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT]", "and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date)", "* self.terminated_amount) result = self.get_url_info(a[\"href\"], date) if not result: # 爬取失败的情况 logging.info(\"[FAILED] {}", "self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled data from {} to {}", "utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for 
dates_range in dates_separated_into_ranges_list: for date in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url,", "!= \"jrj_final_daohang_start1\" and p.attrs == {} and \\ # not p.find_all(\"input\") and not", "max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date # logging.info(\"latest time in database", "= \"{}/{}/{}_{}.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\"), str(num)) bs = utils.html_parser(_url) a_list = bs.find_all(\"a\")", "is {} ... \".format(latest_date_str)) # crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date,", "# 况下,从2015-01-01(自己设定)开始重跑程序会导致大量重复数据,因此在这里稍 # # 作去重。直接从最新的时间节点开始跑是完全没问题,但从2015-01-01(自己设定) # # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] #", "abandoned # logging.info(\"the length of crawled data from {} to {} is {}", "for span in bs.find_all(\"span\"): if span.contents[0] == \"jrj_final_date_start\": date = span.text.replace(\"\\r\", \"\").replace(\"\\n\", \"\")", "\"i\") and not p.find_all(\"span\"): # if p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {}", "logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def", "# logging.info(\"the length of crawled data from {} to {} is {} ...", "if date == \"\": date = specific_date article = \"\" for p in", "# latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list =", "\"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 #", "\\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while not result: self.terminated_amount +=", "by remote server longer than {} minutes, \" 
\"and the failed url has", "a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url):", "%d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__() self.col = self.db_obj.create_col(self.db,", "a_list: if \"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6]))", "p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not", "spyder import Spyder from Kite import config from Kite import utils import time", "in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if", "... \".format(latest_date_str)) # crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): #", "# latest_date_str = start_date # logging.info(\"latest time in database is {} ... 
\".format(latest_date_str))", "\"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article} self.col.insert_one(data) logging.info(\"[SUCCESS] {} {} {}\".format(article_specific_date, a.string, a[\"href\"]))", "date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\")", "crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list = utils.gen_dates_list(dates_list, config.JRJ_DATE_RANGE) for dates_range", "self.is_article_prob >= .1: self.is_article_prob -= .1 result = self.get_url_info(a[\"href\"], date) while not result:", "p.find_all(\"a\", attrs={\"class\": \"red\"}) and not p.find_all( \"i\") and not p.find_all(\"span\"): # if p.contents[0]", "* self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob = .5", "a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1: result = self.get_url_info(a[\"href\"], date) while", "= bs.find_all(\"a\") for a in a_list: if \"href\" in a.attrs and a.string and", "crawled_urls_list = list() # for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\",", "# for _date in utils.get_date_list_from_range(start_date, latest_date_str): # query_results = self.query_news(\"Date\", _date) # for", "logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ == \"__main__\": jrj_spyder = JrjSpyder()", "%(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class JrjSpyder(Spyder): def __init__(self): super(JrjSpyder, self).__init__()", "utils.html_parser(_url) a_list = bs.find_all(\"a\") for a in a_list: if \"href\" in a.attrs and", "time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob =", 
"server longer than {} minutes, \" \"and the failed url has been written", "# 爬取失败的情况 logging.info(\"[FAILED] {} {}\".format(a.string, a[\"href\"])) else: # 有返回但是article为null的情况 article_specific_date, article = result", "as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {} minutes, \" \"and", "in database is {} ... \".format(latest_date_str)) # crawled_urls_list = list() # for _date", "\"href\" in a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1:", "{}\".format(article_specific_date, a.string, a[\"href\"])) self.terminated_amount = 0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self,", "a.attrs and a.string and \\ a[\"href\"].find(\"/{}/{}/\".format(date.replace(\"-\", \"\")[:4], date.replace(\"-\", \"\")[4:6])) != -1: if a[\"href\"]", "import logging logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S') class", "after \" \"{} seconds...\".format(a[\"href\"], 60 * self.terminated_amount)) time.sleep(60 * self.terminated_amount) result = self.get_url_info(a[\"href\"],", "# # crawled_urls_list = self.extract_data([\"Url\"])[0] # abandoned # logging.info(\"the length of crawled data", "# latest_date_str = max(extracted_data_list).split(\" \")[0] # else: # latest_date_str = start_date # logging.info(\"latest", "def get_url_info(self, url, specific_date): try: bs = utils.html_parser(url) except Exception: return False date", "self.terminated_amount) result = self.get_url_info(a[\"href\"], date) article_specific_date, article = result self.is_article_prob = .5 if", "in dates_range: first_url = \"{}/{}/{}_1.shtml\".format(url, date.replace(\"-\", \"\")[0:6], date.replace(\"-\", \"\")) max_pages_num = utils.search_max_pages_num(first_url, date)", "0 # 爬取结束后重置该参数 else: logging.info(\"[QUIT] 
{}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ ==", "if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\") == -1 and \\ a.string.find(\"新三板挂牌上市\") == -1:", "# 爬取结束后重置该参数 else: logging.info(\"[QUIT] {}\".format(a.string)) def get_realtime_news(self, url): pass if __name__ == \"__main__\":", "= self.db_obj.create_col(self.db, config.COLLECTION_NAME_JRJ) self.terminated_amount = 0 def get_url_info(self, url, specific_date): try: bs =", "p.contents[0] != \"jrj_final_daohang_start1\" and p.attrs == {} and \\ # not p.find_all(\"input\") and", "in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs == {} and \\ not p.find_all(\"input\")", "if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") == -1 and a.string.find(\"报于\")", "\".format(start_date, # latest_date_str, # len(crawled_urls_list))) crawled_urls_list = list() dates_list = utils.get_date_list_from_range(start_date, end_date) dates_separated_into_ranges_list", "\"\")[4:6])) != -1: if a[\"href\"] not in crawled_urls_list: # 如果标题不包含\"收盘\",\"报于\"等字样,即可写入数据库,因为包含这些字样标题的新闻多为机器自动生成 if a.string.find(\"收盘\") ==", "result self.is_article_prob = .5 if article != \"\": data = {\"Date\": article_specific_date, \"Url\":", "article != \"\": data = {\"Date\": article_specific_date, \"Url\": a[\"href\"], \"Title\": a.string, \"Article\": article}", "with open(config.RECORD_JRJ_FAILED_URL_TXT_FILE_PATH, \"a+\") as file: file.write(\"{}\\n\".format(a[\"href\"])) logging.info(\"rejected by remote server longer than {}", "# # 开始重跑程序可以尝试将前面未成功爬取的URL重新再试一遍 # extracted_data_list = self.extract_data([\"Date\"])[0] # if len(extracted_data_list) != 0: #", "query_results = self.query_news(\"Date\", _date) # for qr in query_results: # crawled_urls_list.append(qr[\"Url\"]) # #", "url, specific_date): try: bs = utils.html_parser(url) except Exception: return False date = \"\"", "= utils.search_max_pages_num(first_url, 
date) for num in range(1, max_pages_num + 1): _url = \"{}/{}/{}_{}.shtml\".format(url,", "article = \"\" for p in bs.find_all(\"p\"): if not p.find_all(\"jrj_final_daohang_start\") and p.attrs ==" ]
[ ".uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к примеру. ''' CONTROL_TYPE =", "-*- coding: utf-8 -*- from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню,", "utf-8 -*- from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к примеру.", "coding: utf-8 -*- from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к", "import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к примеру. ''' CONTROL_TYPE = 'MenuItem'", "from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к примеру. ''' CONTROL_TYPE", "-*- from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное меню, к примеру. '''", "# -*- coding: utf-8 -*- from .uia_control import UIAControl class MenuItem(UIAControl): ''' Контекстное" ]
[ "\"\"\"Parse the arguments given by the user. :return: Arguments parsed from the console", "NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add", "= parse_arguments() # If the user wants to add another user if args.a:", "salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt", "(\"users\") is available and new users can me added or the existing ones", "the same time if args.r: print(\"Incompatible actions, please choose only one!\") exit() #", "(i.e if the stored password == digest(salt + password given by the user))", ":param u: username :param given_password: password given by the user :return: True or", "username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args if", "user tries to add and remove at the same time if args.r: print(\"Incompatible", "file :param db_path: The path to the database file :type db_path: string \"\"\"", "\"\"\" global conn global cursor rows = cursor.execute(\"SELECT * FROM users WHERE username=?\",", "wants to add another user if args.a: # If the user tries to", "conn global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM", "not users: print(\"No users found!\") else: for i in range(len(users)): print('username: ' +", "users :rtype: list of existing users \"\"\" global conn global cursor cursor.execute('SELECT *", "can be removed. 
See the argparse options for more information.\"\"\" import sqlite3 import", "= None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite", "for users according to the defiend schema\"\"\" global conn global cursor # Create", "if digest == stored_password.lower(): return True else: return False def parse_arguments(): \"\"\"Parse the", "script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() #", "args.r: print(\"Incompatible actions, please choose only one!\") exit() # if the password is", "hash p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user", "args.r: remove_user(args.username) # Show all the users in the database if needed if", "this is useful for the --show parameter :return: list of existing users :rtype:", "list of existing users \"\"\" global conn global cursor cursor.execute('SELECT * FROM users')", "* FROM users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() # return False", "given username and password :param u: username :type u: string :param p: password", "schema\"\"\" global conn global cursor # Create table with username, password and salt", "if a user is allowed tu perform the action :param u: username :param", "# if the user already exists, replace its password and salt cursor.execute(\"INSERT OR", "if not users: print(\"No users found!\") else: for i in range(len(users)): print('username: '", "False if no user is found with that username if len(user) == 0:", "database given the path to the .db file :param db_path: The path to", ":type db_path: string \"\"\" global conn global cursor conn = sqlite3.connect(db_path) cursor =", "p: password :type p: string \"\"\" global conn global cursor salt = random.randint(1,", "parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", 
help=\"Show all existing users\",", "\"\"\" global conn global cursor cursor.execute('SELECT * FROM users') users = cursor.fetchall() if", "table for users according to the defiend schema\"\"\" global conn global cursor #", "# return False if the user is found but the password is incorrect", "helps a user to manage the database. Only one table (\"users\") is available", "username, password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256)", ":param u: username :type u: string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM", "try: cursor.execute(\"SELECT * FROM users\") # if the table does not exist create", "on the folder where the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path))", "TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT", "\"\"\" global conn global cursor salt = random.randint(1, 1000000) # add the salt", "INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user", "the password is incorrect if digest == stored_password.lower(): return True else: return False", "= random.randint(1, 1000000) # add the salt to the password before computing the", "conn global cursor cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,)) conn.commit() def", "no user is found with that username if len(user) == 0: return False", "existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username", "global conn global cursor rows = cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,))", "users, this is useful for the --show parameter :return: list of existing users", "the user tries to add and remove at the same time if args.r:", "p: string \"\"\" global conn global cursor salt = random.randint(1, 1000000) # add", "(u,)) conn.commit() def 
get_users(): \"\"\"Get all the existing users, this is useful for", "given his username :param u: username :type u: string \"\"\" global conn global", "remove_user(u): \"\"\"Remove a user from the database given his username :param u: username", "parse_arguments() # If the user wants to add another user if args.a: #", "the user. :return: Arguments parsed from the console :rtype: list \"\"\" parser =", "help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password',", "module helps a user to manage the database. Only one table (\"users\") is", "user to the database given username and password :param u: username :type u:", "salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a new", "the existing ones can be removed. See the argparse options for more information.\"\"\"", "string \"\"\" global conn global cursor salt = random.randint(1, 1000000) # add the", "conn.cursor() try: cursor.execute(\"SELECT * FROM users\") # if the table does not exist", "username :type u: string :param p: password :type p: string \"\"\" global conn", "return False # check if the stored password is correct # (i.e if", "def create_users_table(): \"\"\"Create table for users according to the defiend schema\"\"\" global conn", "and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit()", "file :type db_path: string \"\"\" global conn global cursor conn = sqlite3.connect(db_path) cursor", "global cursor salt = random.randint(1, 1000000) # add the salt to the password", "digest(salt + password given by the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt", "options for more information.\"\"\" import sqlite3 import argparse import os import random import", "\"\"\" global conn global cursor cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,))", "user is found with that 
username if len(user) == 0: return False #", "correct # (i.e if the stored password == digest(salt + password given by", "cursor.execute('SELECT * FROM users') users = cursor.fetchall() if len(users) > 0: return users", "= rows.fetchall() # return False if no user is found with that username", "conn.commit() def remove_user(u): \"\"\"Remove a user from the database given his username :param", "OR REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove", "the table does not exist create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create", "__name__ == \"__main__\": # get the correct path based on the folder where", "rows = cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall()", "at the same time if args.r: print(\"Incompatible actions, please choose only one!\") exit()", "# (i.e if the stored password == digest(salt + password given by the", "If the user wants to remove another user if args.r: remove_user(args.username) # Show", "stored_password.lower(): return True else: return False def parse_arguments(): \"\"\"Parse the arguments given by", "Create table with username, password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT", "password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username", "choose a password as well!\") exit() add_user(args.username, args.password) # If the user wants", "FROM users') users = cursor.fetchall() if len(users) > 0: return users return False", "cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256)", "incorrect if digest == stored_password.lower(): return True else: return False def parse_arguments(): \"\"\"Parse", "= conn.cursor() try: cursor.execute(\"SELECT * FROM users\") # if 
the table does not", "users in the database if needed if args.show: print('Retrieving all existing users...') users", "salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def", "\"\"\"Add a new user to the database given username and password :param u:", "u: string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM users WHERE username =", "the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\",", "given if not args.password: print(\"Please choose a password as well!\") exit() add_user(args.username, args.password)", "please choose only one!\") exit() # if the password is not given if", "actions, please choose only one!\") exit() # if the password is not given", "password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT", "(username))''') def add_user(u, p): \"\"\"Add a new user to the database given username", "the user :return: True or False based on the user's permission :rtype: Boolean", "is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If", "+ given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the", "not given if not args.password: print(\"Please choose a password as well!\") exit() add_user(args.username,", "given by the user :return: True or False based on the user's permission", "WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() # return False if no user", "more information.\"\"\" import sqlite3 import argparse import os import random import hashlib conn", "0: return False # check if the stored password is correct # (i.e", "= sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM users\") # if the", "print(\"No users found!\") else: for i in range(len(users)): 
print('username: ' + users[i][0], '\\tpassword:", "stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password = user[0][1] digest =", "another user if args.r: remove_user(args.username) # Show all the users in the database", "already exists, replace its password and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES", "help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args", "the action :param u: username :param given_password: password given by the user :return:", "WHERE username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the existing users,", "and remove at the same time if args.r: print(\"Incompatible actions, please choose only", "add the salt to the password before computing the hash p = str(salt)", ":rtype: Boolean \"\"\" global conn global cursor rows = cursor.execute(\"SELECT * FROM users", "(u,)) conn.commit() user = rows.fetchall() # return False if no user is found", "the defiend schema\"\"\" global conn global cursor # Create table with username, password", "or the existing ones can be removed. 
See the argparse options for more", "global conn global cursor cursor.execute('SELECT * FROM users') users = cursor.fetchall() if len(users)", "password is not given if not args.password: print(\"Please choose a password as well!\")", "needed if args.show: print('Retrieving all existing users...') users = get_users() if not users:", "See the argparse options for more information.\"\"\" import sqlite3 import argparse import os", "parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password", "action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False,", "required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args if __name__ ==", "VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from the", ":return: Arguments parsed from the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users", "= <PASSWORD>_salt + given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False", "db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the user wants to add another", "password is incorrect if digest == stored_password.lower(): return True else: return False def", "digest == stored_password.lower(): return True else: return False def parse_arguments(): \"\"\"Parse the arguments", "found but the password is incorrect if digest == stored_password.lower(): return True else:", "his username :param u: username :type u: string \"\"\" global conn global cursor", ":rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username", "from the database given his username :param 
u: username :type u: string \"\"\"", "random import hashlib conn = None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def", "global conn global cursor salt = random.randint(1, 1000000) # add the salt to", "def add_user(u, p): \"\"\"Add a new user to the database given username and", "database given username and password :param u: username :type u: string :param p:", "that username if len(user) == 0: return False # check if the stored", "False def is_allowed(u, given_password): \"\"\"Check if a user is allowed tu perform the", "* FROM users') users = cursor.fetchall() if len(users) > 0: return users return", "len(users) > 0: return users return False def is_allowed(u, given_password): \"\"\"Check if a", "args.a: # If the user tries to add and remove at the same", "with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a", "given_password: password given by the user :return: True or False based on the", "= 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given the path to the", "username if len(user) == 0: return False # check if the stored password", "--show parameter :return: list of existing users :rtype: list of existing users \"\"\"", "the salt to the password before computing the hash p = str(salt) +", "digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is found but the", "# add the salt to the password before computing the hash p =", "console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add", "cursor # Create table with username, password and salt cursor.execute('''CREATE TABLE users (username", "tu perform the action :param u: username :param given_password: password given by the", "by the user. 
:return: Arguments parsed from the console :rtype: list \"\"\" parser", "ones can be removed. See the argparse options for more information.\"\"\" import sqlite3", "> 0: return users return False def is_allowed(u, given_password): \"\"\"Check if a user", "password as well!\") exit() add_user(args.username, args.password) # If the user wants to remove", "existing users \"\"\" global conn global cursor cursor.execute('SELECT * FROM users') users =", "a user to manage the database. Only one table (\"users\") is available and", "manage the database. Only one table (\"users\") is available and new users can", "* FROM users\") # if the table does not exist create one except", "username=?\", (u,)) conn.commit() user = rows.fetchall() # return False if no user is", "if the table does not exist create one except sqlite3.OperationalError: create_users_table() def create_users_table():", "to sqlite database given the path to the .db file :param db_path: The", "information.\"\"\" import sqlite3 import argparse import os import random import hashlib conn =", "string :param p: password :type p: string \"\"\" global conn global cursor salt", "the arguments given by the user. 
:return: Arguments parsed from the console :rtype:", "Only one table (\"users\") is available and new users can me added or", "+ p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace its", "with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\",", "database given his username :param u: username :type u: string \"\"\" global conn", "the users in the database if needed if args.show: print('Retrieving all existing users...')", "= argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\",", "else: return False def parse_arguments(): \"\"\"Parse the arguments given by the user. :return:", "existing users, this is useful for the --show parameter :return: list of existing", "db_path: The path to the database file :type db_path: string \"\"\" global conn", "users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() # return False if no", "if args.r: print(\"Incompatible actions, please choose only one!\") exit() # if the password", "to manage the database. 
Only one table (\"users\") is available and new users", "if args.r: remove_user(args.username) # Show all the users in the database if needed", "user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password = user[0][1] digest", "digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from the database given his", "conn.commit() user = rows.fetchall() # return False if no user is found with", "users WHERE username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the existing", "users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\",", "global cursor rows = cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit() user", "<gh_stars>0 \"\"\"This module helps a user to manage the database. Only one table", "parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args if __name__ == \"__main__\": #", "user if args.a: # If the user tries to add and remove at", "global conn global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT *", "well!\") exit() add_user(args.username, args.password) # If the user wants to remove another user", "str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() #", "def parse_arguments(): \"\"\"Parse the arguments given by the user. 
:return: Arguments parsed from", "invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the", "perform the action :param u: username :param given_password: password given by the user", "is found but the password is incorrect if digest == stored_password.lower(): return True", "# Show all the users in the database if needed if args.show: print('Retrieving", "get the correct path based on the folder where the script is invoked", "defiend schema\"\"\" global conn global cursor # Create table with username, password and", "if the stored password is correct # (i.e if the stored password ==", "== \"__main__\": # get the correct path based on the folder where the", "of existing users :rtype: list of existing users \"\"\" global conn global cursor", "cursor.execute(\"SELECT * FROM users\") # if the table does not exist create one", "salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from the database given his username", "remove another user if args.r: remove_user(args.username) # Show all the users in the", "tries to add and remove at the same time if args.r: print(\"Incompatible actions,", "/ Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove", "the hash p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the", "a user from the database given his username :param u: username :type u:", "random.randint(1, 1000000) # add the salt to the password before computing the hash", ":type u: string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM users WHERE username", "and new users can me added or the existing ones can be removed.", "db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the user wants", "to the .db file :param db_path: The path to the database file :type", 
"rows.fetchall() # return False if no user is found with that username if", "argparse options for more information.\"\"\" import sqlite3 import argparse import os import random", "the user already exists, replace its password and salt cursor.execute(\"INSERT OR REPLACE INTO", "db_path: string \"\"\" global conn global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor()", ".db file :param db_path: The path to the database file :type db_path: string", "the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments()", "# return False if no user is found with that username if len(user)", "user if args.r: remove_user(args.username) # Show all the users in the database if", "global cursor cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,)) conn.commit() def get_users():", "def get_users(): \"\"\"Get all the existing users, this is useful for the --show", ":return: True or False based on the user's permission :rtype: Boolean \"\"\" global", "does not exist create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for", "before computing the hash p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() #", "get_users(): \"\"\"Get all the existing users, this is useful for the --show parameter", "\"\"\"This module helps a user to manage the database. 
Only one table (\"users\")", "is_allowed(u, given_password): \"\"\"Check if a user is allowed tu perform the action :param", "False if the user is found but the password is incorrect if digest", "cursor rows = cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit() user =", "one table (\"users\") is available and new users can me added or the", "users found!\") else: for i in range(len(users)): print('username: ' + users[i][0], '\\tpassword: '", "as well!\") exit() add_user(args.username, args.password) # If the user wants to remove another", "table (\"users\") is available and new users can me added or the existing", "create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users according to", "\"\"\"Check if a user is allowed tu perform the action :param u: username", "list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u'", ":param given_password: password given by the user :return: True or False based on", "# get the correct path based on the folder where the script is", "the user wants to remove another user if args.r: remove_user(args.username) # Show all", "cursor cursor.execute('SELECT * FROM users') users = cursor.fetchall() if len(users) > 0: return", "add and remove at the same time if args.r: print(\"Incompatible actions, please choose", "# check if the stored password is correct # (i.e if the stored", "user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is found but", "choose only one!\") exit() # if the password is not given if not", "add_user(args.username, args.password) # If the user wants to remove another user if args.r:", "# If the user wants to add another user if args.a: # If", "a new user to the database given username and password :param u: username", "\"__main__\": # get the correct path based on the folder where the script", 
"= get_users() if not users: print(\"No users found!\") else: for i in range(len(users)):", "the database given username and password :param u: username :type u: string :param", "password given by the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password", "users according to the defiend schema\"\"\" global conn global cursor # Create table", "parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with", "args.show: print('Retrieving all existing users...') users = get_users() if not users: print(\"No users", "import hashlib conn = None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path):", "the database file :type db_path: string \"\"\" global conn global cursor conn =", "can me added or the existing ones can be removed. See the argparse", "# Create table with username, password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256)", "user wants to add another user if args.a: # If the user tries", "stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is", "= hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace its password and salt", "database file :type db_path: string \"\"\" global conn global cursor conn = sqlite3.connect(db_path)", "cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM users\") #", "only one!\") exit() # if the password is not given if not args.password:", "not args.password: print(\"Please choose a password as well!\") exit() add_user(args.username, args.password) # If", "user is found but the password is incorrect if digest == stored_password.lower(): return", "is incorrect if digest == stored_password.lower(): return True else: return False def parse_arguments():", "parser.add_argument('-password', 
help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return", "all existing users...') users = get_users() if not users: print(\"No users found!\") else:", "The path to the database file :type db_path: string \"\"\" global conn global", "= cursor.fetchall() if len(users) > 0: return users return False def is_allowed(u, given_password):", "to the password before computing the hash p = str(salt) + p digest", "False # check if the stored password is correct # (i.e if the", "given the path to the .db file :param db_path: The path to the", ":rtype: list of existing users \"\"\" global conn global cursor cursor.execute('SELECT * FROM", "= user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is found", "NOT NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a new user to", "open_and_create(db_path): \"\"\"Connect to sqlite database given the path to the .db file :param", "return True else: return False def parse_arguments(): \"\"\"Parse the arguments given by the", "check if the stored password is correct # (i.e if the stored password", "the --show parameter :return: list of existing users :rtype: list of existing users", "and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL,", "== digest(salt + password given by the user)) stored_salt = str(user[0][2]) given_password =", "found with that username if len(user) == 0: return False # check if", "print('Retrieving all existing users...') users = get_users() if not users: print(\"No users found!\")", "cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() # return", "= None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given the", "cursor = conn.cursor() try: 
cursor.execute(\"SELECT * FROM users\") # if the table does", "Arguments parsed from the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users /", "+ password given by the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt +", "the existing users, this is useful for the --show parameter :return: list of", "users = get_users() if not users: print(\"No users found!\") else: for i in", "KEY (username))''') def add_user(u, p): \"\"\"Add a new user to the database given", "def is_allowed(u, given_password): \"\"\"Check if a user is allowed tu perform the action", "exist create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users according", "CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a new user", "= ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the existing users, this is", "parse_arguments(): \"\"\"Parse the arguments given by the user. :return: Arguments parsed from the", "(u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from the database given", "os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the user wants to add", "= os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the user wants to", "return False def parse_arguments(): \"\"\"Parse the arguments given by the user. 
:return: Arguments", "# if the password is not given if not args.password: print(\"Please choose a", "sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM users\") # if the table", "cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given", ":param p: password :type p: string \"\"\" global conn global cursor salt =", "open_and_create(db_path) args = parse_arguments() # If the user wants to add another user", "\"\"\"Get all the existing users, this is useful for the --show parameter :return:", "the password before computing the hash p = str(salt) + p digest =", "'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given the path to the .db", "for more information.\"\"\" import sqlite3 import argparse import os import random import hashlib", "= str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists,", "users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from", "\"\"\"Remove a user from the database given his username :param u: username :type", "conn = None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to", "return args if __name__ == \"__main__\": # get the correct path based on", "user already exists, replace its password and salt cursor.execute(\"INSERT OR REPLACE INTO users", "salt = random.randint(1, 1000000) # add the salt to the password before computing", "not exist create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users", "password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args if __name__", "if not args.password: print(\"Please choose a password as well!\") exit() 
add_user(args.username, args.password) #", ":param db_path: The path to the database file :type db_path: string \"\"\" global", "users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\",", "u: string :param p: password :type p: string \"\"\" global conn global cursor", "wants to remove another user if args.r: remove_user(args.username) # Show all the users", "Boolean \"\"\" global conn global cursor rows = cursor.execute(\"SELECT * FROM users WHERE", "action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing", "\"\"\" global conn global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT", "sqlite3 import argparse import os import random import hashlib conn = None cursor", "== stored_password.lower(): return True else: return False def parse_arguments(): \"\"\"Parse the arguments given", "is correct # (i.e if the stored password == digest(salt + password given", "username :type u: string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM users WHERE", "its password and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u, digest,", "the path to the .db file :param db_path: The path to the database", "cursor salt = random.randint(1, 1000000) # add the salt to the password before", "the user wants to add another user if args.a: # If the user", "but the password is incorrect if digest == stored_password.lower(): return True else: return", "existing ones can be removed. See the argparse options for more information.\"\"\" import", "password :param u: username :type u: string :param p: password :type p: string", "permission :rtype: Boolean \"\"\" global conn global cursor rows = cursor.execute(\"SELECT * FROM", "given by the user. 
:return: Arguments parsed from the console :rtype: list \"\"\"", "or False based on the user's permission :rtype: Boolean \"\"\" global conn global", "cursor cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get", "FROM users WHERE username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the", "remove_user(args.username) # Show all the users in the database if needed if args.show:", "True else: return False def parse_arguments(): \"\"\"Parse the arguments given by the user.", "path based on the folder where the script is invoked in db_path =", "by the user :return: True or False based on the user's permission :rtype:", "get_users() if not users: print(\"No users found!\") else: for i in range(len(users)): print('username:", "existing users...') users = get_users() if not users: print(\"No users found!\") else: for", "stored password is correct # (i.e if the stored password == digest(salt +", "action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True,", "NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a new user to the", "with that username if len(user) == 0: return False # check if the", "name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args", "and password :param u: username :type u: string :param p: password :type p:", "hashlib conn = None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect", ":type u: string :param p: password :type p: string \"\"\" global conn global", "existing users :rtype: list of existing users \"\"\" global conn global cursor cursor.execute('SELECT", "time if args.r: print(\"Incompatible actions, please choose only one!\") 
exit() # if the", "by the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password =", "if __name__ == \"__main__\": # get the correct path based on the folder", "cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all", "a user is allowed tu perform the action :param u: username :param given_password:", "the database if needed if args.show: print('Retrieving all existing users...') users = get_users()", "# If the user wants to remove another user if args.r: remove_user(args.username) #", "conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM users\") # if", "1000000) # add the salt to the password before computing the hash p", "return users return False def is_allowed(u, given_password): \"\"\"Check if a user is allowed", "users = cursor.fetchall() if len(users) > 0: return users return False def is_allowed(u,", "available and new users can me added or the existing ones can be", "hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace its password and salt cursor.execute(\"INSERT", "based on the folder where the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(),", "?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the existing users, this is useful", "Show all the users in the database if needed if args.show: print('Retrieving all", "table with username, password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL,", "users...') users = get_users() if not users: print(\"No users found!\") else: for i", "p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already", "a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\",", "import os import random import hashlib conn = None 
cursor = None db_abs_path", "salt to the password before computing the hash p = str(salt) + p", "the database given his username :param u: username :type u: string \"\"\" global", "create_users_table() def create_users_table(): \"\"\"Create table for users according to the defiend schema\"\"\" global", "os import random import hashlib conn = None cursor = None db_abs_path =", "username and password :param u: username :type u: string :param p: password :type", "path to the database file :type db_path: string \"\"\" global conn global cursor", "parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None)", "NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a", "arguments given by the user. :return: Arguments parsed from the console :rtype: list", "action :param u: username :param given_password: password given by the user :return: True", "parsed from the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove", "given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user", "= parser.parse_args() return args if __name__ == \"__main__\": # get the correct path", ":type p: string \"\"\" global conn global cursor salt = random.randint(1, 1000000) #", "password before computing the hash p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest()", "user is allowed tu perform the action :param u: username :param given_password: password", "import random import hashlib conn = None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db'", "if len(users) > 0: return users return False def is_allowed(u, given_password): \"\"\"Check if", "import sqlite3 import argparse import os import random import hashlib conn = None", "import argparse import os import random import hashlib 
conn = None cursor =", "folder where the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args", "if the user already exists, replace its password and salt cursor.execute(\"INSERT OR REPLACE", "one!\") exit() # if the password is not given if not args.password: print(\"Please", "another user if args.a: # If the user tries to add and remove", "def remove_user(u): \"\"\"Remove a user from the database given his username :param u:", "all the users in the database if needed if args.show: print('Retrieving all existing", "string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM users WHERE username = ?\",", "username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\")", "user wants to remove another user if args.r: remove_user(args.username) # Show all the", "exit() add_user(args.username, args.password) # If the user wants to remove another user if", "to the defiend schema\"\"\" global conn global cursor # Create table with username,", "table does not exist create one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table", "parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None)", "= cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() #", "print(\"Please choose a password as well!\") exit() add_user(args.username, args.password) # If the user", "(?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a user from the database", "users') users = cursor.fetchall() if len(users) > 0: return users return False def", "NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY 
(username))''')", "'-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all", "is available and new users can me added or the existing ones can", "password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show", "in the database if needed if args.show: print('Retrieving all existing users...') users =", "version=\"1.0\") args = parser.parse_args() return args if __name__ == \"__main__\": # get the", "help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\",", "according to the defiend schema\"\"\" global conn global cursor # Create table with", "stored password == digest(salt + password given by the user)) stored_salt = str(user[0][2])", "exists, replace its password and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\",", "'-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\", action=\"store_true\")", "the password is not given if not args.password: print(\"Please choose a password as", "except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users according to the defiend", "path to the .db file :param db_path: The path to the database file", "is allowed tu perform the action :param u: username :param given_password: password given", "required=True, default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args =", "global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try: cursor.execute(\"SELECT * FROM users\")", "users can 
me added or the existing ones can be removed. See the", "new users can me added or the existing ones can be removed. See", "parser.parse_args() return args if __name__ == \"__main__\": # get the correct path based", "return False if the user is found but the password is incorrect if", "password given by the user :return: True or False based on the user's", "the user is found but the password is incorrect if digest == stored_password.lower():", "args if __name__ == \"__main__\": # get the correct path based on the", "True or False based on the user's permission :rtype: Boolean \"\"\" global conn", "be removed. See the argparse options for more information.\"\"\" import sqlite3 import argparse", "found!\") else: for i in range(len(users)): print('username: ' + users[i][0], '\\tpassword: ' +", "= hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is found but the password", "cursor.fetchall() if len(users) > 0: return users return False def is_allowed(u, given_password): \"\"\"Check", "# If the user tries to add and remove at the same time", "return False def is_allowed(u, given_password): \"\"\"Check if a user is allowed tu perform", "if the stored password == digest(salt + password given by the user)) stored_salt", "the argparse options for more information.\"\"\" import sqlite3 import argparse import os import", "string \"\"\" global conn global cursor conn = sqlite3.connect(db_path) cursor = conn.cursor() try:", "args.password: print(\"Please choose a password as well!\") exit() add_user(args.username, args.password) # If the", "users\") # if the table does not exist create one except sqlite3.OperationalError: create_users_table()", "add_user(u, p): \"\"\"Add a new user to the database given username and password", "FROM users\") # if the table does not exist create one except sqlite3.OperationalError:", "cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u):", "if 
len(user) == 0: return False # check if the stored password is", "False def parse_arguments(): \"\"\"Parse the arguments given by the user. :return: Arguments parsed", "the stored password is correct # (i.e if the stored password == digest(salt", "is not given if not args.password: print(\"Please choose a password as well!\") exit()", "If the user wants to add another user if args.a: # If the", "if needed if args.show: print('Retrieving all existing users...') users = get_users() if not", "args.password) # If the user wants to remove another user if args.r: remove_user(args.username)", "database. Only one table (\"users\") is available and new users can me added", "users: print(\"No users found!\") else: for i in range(len(users)): print('username: ' + users[i][0],", "if args.a: # If the user tries to add and remove at the", "== 0: return False # check if the stored password is correct #", "If the user tries to add and remove at the same time if", "= str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest()", "\"\"\"Create table for users according to the defiend schema\"\"\" global conn global cursor", "the folder where the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path)", "user. 
:return: Arguments parsed from the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add", "\"\"\"Connect to sqlite database given the path to the .db file :param db_path:", "useful for the --show parameter :return: list of existing users :rtype: list of", "user :return: True or False based on the user's permission :rtype: Boolean \"\"\"", "to add and remove at the same time if args.r: print(\"Incompatible actions, please", "password :type p: string \"\"\" global conn global cursor salt = random.randint(1, 1000000)", "username = ?\", (u,)) conn.commit() def get_users(): \"\"\"Get all the existing users, this", "username :param u: username :type u: string \"\"\" global conn global cursor cursor.execute(\"DELETE", "global conn global cursor # Create table with username, password and salt cursor.execute('''CREATE", "str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace", "given_password): \"\"\"Check if a user is allowed tu perform the action :param u:", "the stored password == digest(salt + password given by the user)) stored_salt =", "p): \"\"\"Add a new user to the database given username and password :param", "list of existing users :rtype: list of existing users \"\"\" global conn global", "the database. Only one table (\"users\") is available and new users can me", "user to manage the database. 
Only one table (\"users\") is available and new", "False based on the user's permission :rtype: Boolean \"\"\" global conn global cursor", ":return: list of existing users :rtype: list of existing users \"\"\" global conn", "conn global cursor # Create table with username, password and salt cursor.execute('''CREATE TABLE", "u: username :type u: string :param p: password :type p: string \"\"\" global", "add another user if args.a: # If the user tries to add and", "help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password", "username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u' with password '-p'\",", "to the database file :type db_path: string \"\"\" global conn global cursor conn", "correct path based on the folder where the script is invoked in db_path", "digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace its password and", "users (username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL,", "help=\"Remove username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\")", "user from the database given his username :param u: username :type u: string", "def open_and_create(db_path): \"\"\"Connect to sqlite database given the path to the .db file", "new user to the database given username and password :param u: username :type", "in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args = parse_arguments() # If the user", "the .db file :param db_path: The path to the database file :type db_path:", "password == digest(salt + password given by the user)) stored_salt = str(user[0][2]) given_password", "'-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") 
parser.add_argument('-username', help=\"add a username name\",", "based on the user's permission :rtype: Boolean \"\"\" global conn global cursor rows", "CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u, p):", "is useful for the --show parameter :return: list of existing users :rtype: list", "else: for i in range(len(users)): print('username: ' + users[i][0], '\\tpassword: ' + users[i][1])", "removed. See the argparse options for more information.\"\"\" import sqlite3 import argparse import", "from the console :rtype: list \"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\")", "parameter :return: list of existing users :rtype: list of existing users \"\"\" global", "the correct path based on the folder where the script is invoked in", "CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY", "conn.commit() def get_users(): \"\"\"Get all the existing users, this is useful for the", "hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return False if the user is found but the password is", "default=None) parser.add_argument('-password', help=\"the username password\", required=False, default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args()", "if the user is found but the password is incorrect if digest ==", "user = rows.fetchall() # return False if no user is found with that", "if no user is found with that username if len(user) == 0: return", "'-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add", "0: return users return False def is_allowed(u, given_password): \"\"\"Check if a user is", "create_users_table(): \"\"\"Create table for users according to the defiend schema\"\"\" global conn global", "global cursor # Create table with username, password and 
salt cursor.execute('''CREATE TABLE users", "users return False def is_allowed(u, given_password): \"\"\"Check if a user is allowed tu", "username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-show\", help=\"Show all existing users\", action=\"store_true\") parser.add_argument('-username',", "sqlite database given the path to the .db file :param db_path: The path", "database if needed if args.show: print('Retrieving all existing users...') users = get_users() if", "# if the table does not exist create one except sqlite3.OperationalError: create_users_table() def", "replace its password and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u,", "to the database given username and password :param u: username :type u: string", "conn global cursor salt = random.randint(1, 1000000) # add the salt to the", "username :param given_password: password given by the user :return: True or False based", "where the script is invoked in db_path = os.path.abspath(os.path.join(os.getcwd(), db_abs_path)) open_and_create(db_path) args =", "(username CHARACTER(256) NOT NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY", "global conn global cursor cursor.execute(\"DELETE FROM users WHERE username = ?\", (u,)) conn.commit()", "of existing users \"\"\" global conn global cursor cursor.execute('SELECT * FROM users') users", "argparse import os import random import hashlib conn = None cursor = None", "\"\"\" parser = argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with", "all the existing users, this is useful for the --show parameter :return: list", "args = parse_arguments() # If the user wants to add another user if", "remove at the same time if args.r: print(\"Incompatible actions, please choose only one!\")", "<PASSWORD>_salt + given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # 
return False if", "conn global cursor rows = cursor.execute(\"SELECT * FROM users WHERE username=?\", (u,)) conn.commit()", "users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username '-u'", "args = parser.parse_args() return args if __name__ == \"__main__\": # get the correct", "password and salt cursor.execute(\"INSERT OR REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt))", "u: username :type u: string \"\"\" global conn global cursor cursor.execute(\"DELETE FROM users", "the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password = user[0][1]", "NULL, password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def", "to add another user if args.a: # If the user tries to add", "default=None) parser.add_argument(\"--version\", action=\"version\", version=\"1.0\") args = parser.parse_args() return args if __name__ == \"__main__\":", "to remove another user if args.r: remove_user(args.username) # Show all the users in", "user's permission :rtype: Boolean \"\"\" global conn global cursor rows = cursor.execute(\"SELECT *", "for the --show parameter :return: list of existing users :rtype: list of existing", "exit() # if the password is not given if not args.password: print(\"Please choose", "allowed tu perform the action :param u: username :param given_password: password given by", "password is correct # (i.e if the stored password == digest(salt + password", "users \"\"\" global conn global cursor cursor.execute('SELECT * FROM users') users = cursor.fetchall()", "conn global cursor cursor.execute('SELECT * FROM users') users = cursor.fetchall() if len(users) >", "me added or the existing ones can be removed. 
See the argparse options", "argparse.ArgumentParser(description=\"Add users / Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\")", "PRIMARY KEY (username))''') def add_user(u, p): \"\"\"Add a new user to the database", "p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if the user already exists, replace its password", "a password as well!\") exit() add_user(args.username, args.password) # If the user wants to", "password CHARACTER(256) NOT NULL, salt CHARACTER(256) NOT NULL, PRIMARY KEY (username))''') def add_user(u,", "computing the hash p = str(salt) + p digest = hashlib.sha256(p.encode('utf-8')).hexdigest() # if", "all existing users\", action=\"store_true\") parser.add_argument('-username', help=\"add a username name\", required=True, default=None) parser.add_argument('-password', help=\"the", "print(\"Incompatible actions, please choose only one!\") exit() # if the password is not", "the user's permission :rtype: Boolean \"\"\" global conn global cursor rows = cursor.execute(\"SELECT", "with username, password and salt cursor.execute('''CREATE TABLE users (username CHARACTER(256) NOT NULL, password", "REPLACE INTO users VALUES (?,?,?)\", (u, digest, salt)) conn.commit() def remove_user(u): \"\"\"Remove a", "return False if no user is found with that username if len(user) ==", "is found with that username if len(user) == 0: return False # check", "if args.show: print('Retrieving all existing users...') users = get_users() if not users: print(\"No", ":param u: username :type u: string :param p: password :type p: string \"\"\"", "db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given the path to", "given by the user)) stored_salt = str(user[0][2]) given_password = <PASSWORD>_salt + given_password stored_password", "on the user's permission :rtype: Boolean \"\"\" global conn global cursor rows =", "global cursor 
cursor.execute('SELECT * FROM users') users = cursor.fetchall() if len(users) > 0:", "Remove users\") parser.add_argument(\"-a\", help=\"Add username '-u' with password '-p'\", action=\"store_true\") parser.add_argument(\"-r\", help=\"Remove username", "sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users according to the defiend schema\"\"\"", "None cursor = None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database", "if the password is not given if not args.password: print(\"Please choose a password", "FROM users WHERE username=?\", (u,)) conn.commit() user = rows.fetchall() # return False if", "one except sqlite3.OperationalError: create_users_table() def create_users_table(): \"\"\"Create table for users according to the", "given_password = <PASSWORD>_salt + given_password stored_password = user[0][1] digest = hashlib.sha256(given_password.encode('utf-8')).hexdigest() # return", "len(user) == 0: return False # check if the stored password is correct", "same time if args.r: print(\"Incompatible actions, please choose only one!\") exit() # if", "None db_abs_path = 'earthquakes_package/scripts/database.db' def open_and_create(db_path): \"\"\"Connect to sqlite database given the path", "action=\"version\", version=\"1.0\") args = parser.parse_args() return args if __name__ == \"__main__\": # get", "added or the existing ones can be removed. See the argparse options for", "u: username :param given_password: password given by the user :return: True or False" ]
[ "== 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]' #assert", "# AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS", "+ data_rate + '*' + dtype+'*' + _species + '*' + data_units +", "== 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif", "_species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*' +", "reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy", "suffix of the loaded data; useful for preserving original tplot var species: str", "[species] if not isinstance(datatype, list): datatype = [datatype] out_vars = [] for species_id", "omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average", "'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label", "units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is", "if it's installed, otherwise use the numpy one # bottleneck nanmean is ~2.5x", "= 'mmsx_epd_eis_' + data_rate + '_' + level + '_' + dtype +", "time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle dist for", "sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True)", "numpy one # bottleneck nanmean is ~2.5x faster try: import bottleneck as bn", "spin variable -- now ending procedure.') return # find where 
the spin starts", "options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='',", "proton, oxygen, alpha or electron (default: 'proton') Returns: Name of tplot variables created.", "= 'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species, list): species = [species]", "variable from EIS on multiple MMS spacecraft. Parameters ---------- datatype: str 'extof', 'electroenergy',", "ImportError: nanmean = np.nanmean from pytplot import get_data, store_data, options from ...utilities.tnames import", "A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!')", "in species: for dtype in datatype: # retrieve: omni variables of species to", "find EIS spin variable -- now ending procedure.') return # find where the", "arguments passed to modules are of lowecase if data_units == 'flux': units_label =", "'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*'", "dtype in datatype: # retrieve: omni variables of species to determine # of", "Ensure arguments passed to modules are of lowecase if data_units == 'flux': units_label", "flux over all spacecraft for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee]", "options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) #", "var species: str species for calculation, e.g., proton, oxygen, alpha or electron (default:", "species to determine # of probes _species = species_id if dtype == 'electronenergy':", "tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name,", "'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units", "'.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if", "refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's", "spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp", "'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average", "SMALLEST NUMBER OF TIME STEPS TO USE # AS A REFERENCE SPACECRAFT omni_vars", "minimum time/energy # Note: I did not split these tuples as the namespace", "spin_starts[spin_idx] + 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]})", "+ '*' + data_units + '*omni'+ suffix) # process multiple probes probes =", "= len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size =", "time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]),", "#print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] =", "Parameters ---------- datatype: str 'extof', 'electroenergy', 
or 'phxtof' (default: 'extof') data_rate: str instrument", "= np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data", "omni variables of species to determine # of probes _species = species_id if", "data_units + '*omni'+ suffix) # process multiple probes probes = [] for name", "in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new", "# process multiple probes probes = [] for name in eis_sc_check: probes.append(name[3:4]) if", "4: probes = probes[:-2] if len(probes) > 1: probe_string = probes[0] + '-'", "ending procedure.') return # find where the spin starts _, spin_starts = np.unique(spin_nums[1],", "over all spacecraft and define common energy grid for pp in range(len(omni_vars)): temp_data", "mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram", "== 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*' +", "'flux') suffix: str suffix of the loaded data; useful for preserving original tplot", "for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)]", "if len(probes) > 4: probes = probes[:-2] if len(probes) > 1: probe_string =", "= [species] if not isinstance(datatype, list): datatype = [datatype] out_vars = [] for", "= np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle dist for all", "probes[-1] else: if probes: probe_string = probes[0] else: print('No probes found from eis_sc_check", "'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]' 
#assert type(datatype)", "energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]),", "# DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE # AS", "in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee", "get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:]", "variables created. ''' ## Thoughts for extensions: ## - Ensure arguments passed to", "probe in enumerate(probes): # note: return from get_data here is (times, data, v)", "namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time", "'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units", "of lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps':", "'*' + data_units + '*omni'+ suffix) # process multiple probes probes = []", "= np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x energy omni_spec =", "nanmean from bottleneck if it's installed, otherwise use the numpy one # bottleneck", "nanmean(energy_data[ee,:], axis=0) # Average omni flux over all spacecraft for tt in range(len(time_refprobe[0])):", "'_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE #", "loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle", "time/energy # Note: I did not split these tuples as the namespace is", "suffix: str suffix of the loaded data; useful for preserving original tplot var", "str desired 
units for data, e.g., 'flux' or 'cps' (default: 'flux') suffix: str", "+ '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE", "tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional", "' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums =", "+ data_units + '*omni'+ suffix) # process multiple probes probes = [] for", "values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p]", "'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species +", "'srvy' or 'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str", "omni_spec_data[:] = np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] =", "str instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy') level: str data", "len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft and define", "on multiple MMS spacecraft. 
Parameters ---------- datatype: str 'extof', 'electroenergy', or 'phxtof' (default:", "~2.5x faster try: import bottleneck as bn nanmean = bn.nanmean except ImportError: nanmean", "[datatype] out_vars = [] for species_id in species: for dtype in datatype: #", "common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux over all spacecraft for tt", "now ending procedure.') return # find where the spin starts _, spin_starts =", "= species_id if dtype == 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' +", "variables of species to determine # of probes _species = species_id if dtype", "+ probes[-1] else: if probes: probe_string = probes[0] else: print('No probes found from", "from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level", "temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) #", "tuples as the namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe =", "(default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for", "data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name,", "species_id in species: for dtype in datatype: # retrieve: omni variables of species", "time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data", "procedure.') return # find where the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True)", "= tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!') 
return time_size = np.zeros(len(probes))", "allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level + '_' + dtype", "= np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft and define common energy", "in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] =", "= '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name,", "= np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan", "get_data, store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy',", "modules are of lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units", "from EIS on multiple MMS spacecraft. 
Parameters ---------- datatype: str 'extof', 'electroenergy', or", "otherwise use the numpy one # bottleneck nanmean is ~2.5x faster try: import", "list): species = [species] if not isinstance(datatype, list): datatype = [datatype] out_vars =", "probe_string = probes[0] + '-' + probes[-1] else: if probes: probe_string = probes[0]", "enumerate(probes): # note: return from get_data here is (times, data, v) # according", "data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not find EIS", "['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data, e.g., 'flux' or 'cps' (default:", "not omni_vars: print('No EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes))", "level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram variable from EIS on multiple", "# Average omni flux over all spacecraft for tt in range(len(time_refprobe[0])): for ee", "range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp = '_spin'", "[] for name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2]", "there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times,", "(default: 'flux') suffix: str suffix of the loaded data; useful for preserving original", "= np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2]))", "= np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft", "get_data here is (times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there", "I 
did not split these tuples as the namespace is reused, i.e., \"_refprobe\"", "in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux over all spacecraft", "range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux over all spacecraft for", "passed to modules are of lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]'", "units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the", "energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy", "all 6 (omni) telescopes for p, probe in enumerate(probes): # note: return from", "these tuples as the namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe", "level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data, e.g., 'flux' or 'cps'", "OF TIME STEPS TO USE # AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix)", "species for calculation, e.g., proton, oxygen, alpha or electron (default: 'proton') Returns: Name", "one # bottleneck nanmean is ~2.5x faster try: import bottleneck as bn nanmean", "not find EIS spin variable -- now ending procedure.') return # find where", "= len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc =", "e.g., 'flux' or 'cps' (default: 'flux') suffix: str suffix of the loaded data;", "prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on minimum time/energy #", "calculation, e.g., proton, oxygen, alpha or electron (default: 'proton') Returns: Name 
of tplot", "temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux", "spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] =", "Average omni flux over all spacecraft and define common energy grid for pp", "= int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on minimum", "for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux over", "omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc =", "oxygen, alpha or electron (default: 'proton') Returns: Name of tplot variables created. '''", "time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan", "spacecraft and define common energy grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp])", "data; useful for preserving original tplot var species: str species for calculation, e.g.,", "data_units: str desired units for data, e.g., 'flux' or 'cps' (default: 'flux') suffix:", "# Retrieve specific probe's data based on minimum time/energy # Note: I did", "are of lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units ==", "# note: there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v =", "# retrieve: omni variables of species to determine # of probes _species =", "x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan #", "note: return from get_data here is (times, data, v) # according to 
https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66", "0. new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog',", "'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate: str instrument data rate, e.g., 'srvy'", "= tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species + '*' +", "TIME STEPS TO USE # AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if", "bn nanmean = bn.nanmean except ImportError: nanmean = np.nanmean from pytplot import get_data,", "#t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times)", "spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start =", "omni flux over all spacecraft and define common energy grid for pp in", "probes[0] else: print('No probes found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' +", "for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable", "return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:]", "'electroenergy', or 'phxtof' (default: 'extof') data_rate: str instrument data rate, e.g., 'srvy' or", "= 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts':", "data_rate: str instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy') level: str", "store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) 
options(new_name, 'zlog', True) options(new_name, 'spec', True)", "= get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies)", "np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) #", "elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label =", "dtype+'*' + _species + '*' + data_units + '*omni'+ suffix) # process multiple", "data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available 'spec_bins'", "= nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp = '_spin' new_name =", "eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2] if len(probes) > 1:", "np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over", "1: probe_string = probes[0] + '-' + probes[-1] else: if probes: probe_string =", "EIS spin variable -- now ending procedure.') return # find where the spin", "is None: print('Error: Could not find EIS spin variable -- now ending procedure.')", "in datatype: # retrieve: omni variables of species to determine # of probes", "species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram variable", "'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle',", "omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size)", "_species = species_id 
if dtype == 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*'", "nanmean is ~2.5x faster try: import bottleneck as bn nanmean = bn.nanmean except", "= [datatype] out_vars = [] for species_id in species: for dtype in datatype:", "'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet')", "'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p])", "omni flux over all spacecraft for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])):", "REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!') return", "np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft and", "from pytplot import get_data, store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton',", "WITH SMALLEST NUMBER OF TIME STEPS TO USE # AS A REFERENCE SPACECRAFT", "for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1", "= allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name,", "str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data, e.g., 'flux'", "+ '-' + probes[-1] else: if probes: probe_string = probes[0] else: print('No probes", "EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's", "## Thoughts for 
extensions: ## - Ensure arguments passed to modules are of", "from get_data here is (times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note:", "''' ## Thoughts for extensions: ## - Ensure arguments passed to modules are", "= get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data =", "= get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)])", "species: for dtype in datatype: # retrieve: omni variables of species to determine", "# Average omni flux over all spacecraft and define common energy grid for", "if probes: probe_string = probes[0] else: print('No probes found from eis_sc_check tnames.') return", "alpha or electron (default: 'proton') Returns: Name of tplot variables created. ''' ##", "-- now ending procedure.') return # find where the spin starts _, spin_starts", "tplot variables created. 
''' ## Thoughts for extensions: ## - Ensure arguments passed", "'mmsx_epd_eis_' + data_rate + '_' + level + '_' + dtype + '_'", "units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units ==", "USE # AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No", "datatype: str 'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate: str instrument data rate,", "'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums", "+ _species + '*' + data_units + '*omni'+ suffix) # process multiple probes", "as the namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc])", "probes _species = species_id if dtype == 'electronenergy': _species = 'electron' eis_sc_check =", "omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni", "== 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species, list):", "len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average", "x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)])", "#assert type(datatype) is str if not isinstance(species, list): species = [species] if not", "range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] = 0.", "= spin_starts[spin_idx] + 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp 
store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux,", "options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name,", "- Ensure arguments passed to modules are of lowecase if data_units == 'flux':", "or 'phxtof' (default: 'extof') data_rate: str instrument data rate, e.g., 'srvy' or 'brst'", "'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap',", "range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in", "is (times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also", "[keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None:", "eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species + '*'", "process multiple probes probes = [] for name in eis_sc_check: probes.append(name[3:4]) if len(probes)", "ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)]", "bottleneck if it's installed, otherwise use the numpy one # bottleneck nanmean is", "str suffix of the loaded data; useful for preserving original tplot var species:", "+ dtype+'*' + _species + '*' + data_units + '*omni'+ suffix) # process", "probes = probes[:-2] if len(probes) > 1: probe_string = probes[0] + '-' +", "str 'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate: str instrument data rate, e.g.,", "elif data_units == 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str if not", "new_name = 
allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True)", "Thoughts for extensions: ## - Ensure arguments passed to modules are of lowecase", "omni_spec[np.isnan(omni_spec)] = 0. new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True)", "EIS on multiple MMS spacecraft. Parameters ---------- datatype: str 'extof', 'electroenergy', or 'phxtof'", "# time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data =", "find where the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts),", "axis=0) current_start = spin_starts[spin_idx] + 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name,", "'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name)", "eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level +", "i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x", "str species for calculation, e.g., proton, oxygen, alpha or electron (default: 'proton') Returns:", "+ '_' + level + '_' + dtype + '_' # DETERMINE SPACECRAFT", "store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2',", "'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str if", "tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level + '_'", 
"where the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])])", "True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label)", "'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name, 'spec', True) out_vars.append(new_name) return", "'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species, list): species = [species] if", "options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name, 'spec', True) out_vars.append(new_name)", "according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t,", "e.g., proton, oxygen, alpha or electron (default: 'proton') Returns: Name of tplot variables", "pytplot import get_data, store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux',", "omni-directional energy spectrogram variable from EIS on multiple MMS spacecraft. Parameters ---------- datatype:", "not isinstance(species, list): species = [species] if not isinstance(datatype, list): datatype = [datatype]", "(omni) telescopes for p, probe in enumerate(probes): # note: return from get_data here", "'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species, list): species", "get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size))", "nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni'", "numpy as np # use nanmean from bottleneck if it's installed, otherwise use", "units_label = 'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species, list): species =", "to determine # of probes _species = species_id if dtype == 'electronenergy': _species", "desired units for data, e.g., 'flux' or 'cps' (default: 'flux') suffix: str suffix", "species_id if dtype == 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate", "'*omni'+ suffix) # process multiple probes probes = [] for name in eis_sc_check:", "data, e.g., 'flux' or 'cps' (default: 'flux') suffix: str suffix of the loaded", "type(datatype) is str if not isinstance(species, list): species = [species] if not isinstance(datatype,", "= probes[0] + '-' + probes[-1] else: if probes: probe_string = probes[0] else:", "get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not find EIS spin variable --", "= nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name =", "+ '*omni'+ suffix) # process multiple probes probes = [] for name in", "as bn nanmean = bn.nanmean except ImportError: nanmean = np.nanmean from pytplot import", "print('No EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve", "# time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] =", "current_start = spin_starts[spin_idx] + 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts],", "data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label = 'CountRate\\n[counts/s]'", "= np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux", "here is (times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are", "energy spectrogram variable from EIS on multiple MMS spacecraft. 
Parameters ---------- datatype: str", "= allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name,", "np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft and define common energy grid", "= get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee]", "did not split these tuples as the namespace is reused, i.e., \"_refprobe\" time_refprobe", "np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based", "of the loaded data; useful for preserving original tplot var species: str species", "get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc", "len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size))", "'extof') data_rate: str instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy') level:", "True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', '", "multiple probes probes = [] for name in eis_sc_check: probes.append(name[3:4]) if len(probes) >", "and define common energy grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp]", "'proton') Returns: Name of tplot variables created. 
''' ## Thoughts for extensions: ##", "also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies", "in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2] if len(probes) >", "spectrogram variable from EIS on multiple MMS spacecraft. Parameters ---------- datatype: str 'extof',", "# Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could", "data based on minimum time/energy # Note: I did not split these tuples", "reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix", "(times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available", "to modules are of lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif", "= bn.nanmean except ImportError: nanmean = np.nanmean from pytplot import get_data, store_data, options", "[] for species_id in species: for dtype in datatype: # retrieve: omni variables", "energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:],", "data_units == 'cps': units_label = 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]'", "retrieve: omni variables of species to determine # of probes _species = species_id", "omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!') return time_size =", "''' Combines omni-directional energy spectrogram variable from EIS on multiple MMS spacecraft. 
Parameters", "isinstance(species, list): species = [species] if not isinstance(datatype, list): datatype = [datatype] out_vars", "'flux' or 'cps' (default: 'flux') suffix: str suffix of the loaded data; useful", "in enumerate(probes): # note: return from get_data here is (times, data, v) #", "in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp =", "is ~2.5x faster try: import bottleneck as bn nanmean = bn.nanmean except ImportError:", "True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(),", "angle dist for all 6 (omni) telescopes for p, probe in enumerate(probes): #", "print('No probes found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate +", "= np.nanmean from pytplot import get_data, store_data, options from ...utilities.tnames import tnames def", "for extensions: ## - Ensure arguments passed to modules are of lowecase if", "of species to determine # of probes _species = species_id if dtype ==", "v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] =", "'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on minimum time/energy # Note: I", "= [] for species_id in species: for dtype in datatype: # retrieve: omni", "not split these tuples as the namespace is reused, i.e., \"_refprobe\" time_refprobe =", "or 'cps' (default: 'flux') suffix: str suffix of the loaded data; useful for", "for preserving original tplot var species: str species for calculation, e.g., proton, oxygen,", "energy grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp]", "axis=0) # Average 
omni flux over all spacecraft for tt in range(len(time_refprobe[0])): for", "0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] +", "# use nanmean from bottleneck if it's installed, otherwise use the numpy one", "data_rate + '_' + level + '_' + dtype + '_' # DETERMINE", "if len(probes) > 1: probe_string = probes[0] + '-' + probes[-1] else: if", "Returns: Name of tplot variables created. ''' ## Thoughts for extensions: ## -", "spacecraft for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0)", "'+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch", "get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]),", "e.g., 'srvy' or 'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units:", "not isinstance(datatype, list): datatype = [datatype] out_vars = [] for species_id in species:", "= probes[0] else: print('No probes found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_'", "1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec',", "new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]})", "_species + '*' + data_units + '*omni'+ suffix) # process multiple probes probes", "int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific", "store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True)", "electron (default: 'proton') Returns: Name of tplot variables created. ''' ## Thoughts for", "'-' + probes[-1] else: if probes: probe_string = probes[0] else: print('No probes found", "variable -- now ending procedure.') return # find where the spin starts _,", "len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])])", "= np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix =", "of tplot variables created. 
''' ## Thoughts for extensions: ## - Ensure arguments", "v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available 'spec_bins' values", "name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2] if len(probes)", "(default)] data_units: str desired units for data, e.g., 'flux' or 'cps' (default: 'flux')", "return # find where the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux", "+ 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name,", "Retrieve specific probe's data based on minimum time/energy # Note: I did not", "omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size", "common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over all spacecraft and define common", "bottleneck nanmean is ~2.5x faster try: import bottleneck as bn nanmean = bn.nanmean", "DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE # AS A", "use the numpy one # bottleneck nanmean is ~2.5x faster try: import bottleneck", "species: str species for calculation, e.g., proton, oxygen, alpha or electron (default: 'proton')", "np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]),", "# of probes _species = species_id if dtype == 'electronenergy': _species = 'electron'", "spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp = '_spin' new_name", "'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ 
'.join(['mms'+probe_string,", "---------- datatype: str 'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate: str instrument data", "time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc", "# store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0],", "tplot var species: str species for calculation, e.g., proton, oxygen, alpha or electron", "energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy = np.zeros(len(energy_refprobe[2])) # Average omni flux over all", "data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram variable from", "MMS spacecraft. Parameters ---------- datatype: str 'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate:", "nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx] + 1 sp = '_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp", "if not omni_vars: print('No EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size =", "based on minimum time/energy # Note: I did not split these tuples as", "= 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name,", "common energy grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)]", "'phxtof' (default: 'extof') data_rate: str instrument data rate, e.g., 'srvy' or 'brst' (default:", "x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x", "all spacecraft and define common energy grid for pp in range(len(omni_vars)): temp_data =", "NUMBER OF TIME STEPS TO USE # AS A REFERENCE SPACECRAFT omni_vars =", "variable omni_spec[np.isnan(omni_spec)] = 0. new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog',", "for species_id in species: for dtype in datatype: # retrieve: omni variables of", "ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on", "= get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not find EIS spin variable", "data rate, e.g., 'srvy' or 'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2'", "AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data", "telescopes for p, probe in enumerate(probes): # note: return from get_data here is", "available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies =", "else: if probes: probe_string = probes[0] else: print('No probes found from eis_sc_check tnames.')", "if spin_nums is None: print('Error: 
Could not find EIS spin variable -- now", "data_units == 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str if not isinstance(species,", "probes probes = [] for name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4:", "note: there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p])", "Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not", "options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name,", "determine # of probes _species = species_id if dtype == 'electronenergy': _species =", "for name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2] if", "options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy", "spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx", "for all 6 (omni) telescopes for p, probe in enumerate(probes): # note: return", "for p, probe in enumerate(probes): # note: return from get_data here is (times,", "extensions: ## - Ensure arguments passed to modules are of lowecase if data_units", "pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for", "omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data = np.zeros([len(energy_refprobe[2]), len(probes)]) common_energy =", "'*' + dtype+'*' + _species + '*' + data_units + '*omni'+ suffix) #", "options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle', 
units_label) options(new_name, 'ytitle', ' \\\\", "from bottleneck if it's installed, otherwise use the numpy one # bottleneck nanmean", "len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start", "int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on minimum time/energy", "options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data", "if dtype == 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate +", "datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram variable from EIS", "= 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start = spin_starts[spin_idx]", "suffix='', ): ''' Combines omni-directional energy spectrogram variable from EIS on multiple MMS", "omni_vars: print('No EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) #", "'_spin' new_name = allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog',", "> 1: probe_string = probes[0] + '-' + probes[-1] else: if probes: probe_string", "print('Error: Could not find EIS spin variable -- now ending procedure.') return #", "'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name, 'spec', True)", "np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:] = np.nan energy_data", "= 
get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size =", "the numpy one # bottleneck nanmean is ~2.5x faster try: import bottleneck as", "spacecraft. Parameters ---------- datatype: str 'extof', 'electroenergy', or 'phxtof' (default: 'extof') data_rate: str", "= np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in", "+ '*' + dtype+'*' + _species + '*' + data_units + '*omni'+ suffix)", "nanmean = np.nanmean from pytplot import get_data, store_data, options from ...utilities.tnames import tnames", "\"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x energy x spacecraft", "np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'", "spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x energy", "data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy spectrogram variable from EIS on", "pitch angle dist for all 6 (omni) telescopes for p, probe in enumerate(probes):", "probe's pitch angle dist for all 6 (omni) telescopes for p, probe in", "the loaded data; useful for preserving original tplot var species: str species for", "import bottleneck as bn nanmean = bn.nanmean except ImportError: nanmean = np.nanmean from", "+ data_rate + '_' + level + '_' + dtype + '_' #", "all spacecraft for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:],", "to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are 
also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data,", "are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v = get_data(omni_vars[p]) omni_times, omni_data,", "level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data, e.g.,", "= np.zeros(len(probes)) # Retrieve probe's pitch angle dist for all 6 (omni) telescopes", "level + '_' + dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER", "SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO USE # AS A REFERENCE", "current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0) current_start =", "for data, e.g., 'flux' or 'cps' (default: 'flux') suffix: str suffix of the", "on minimum time/energy # Note: I did not split these tuples as the", "True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name, 'spec', True) out_vars.append(new_name) return out_vars", "get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] =", "axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name,", "np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle dist for all 6", "\\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums = get_data(prefix+'spin'+suffix)", "dtype == 'electronenergy': _species = 'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*'", "'cps' (default: 'flux') suffix: str suffix of the loaded data; useful for preserving", "): ''' Combines omni-directional energy spectrogram variable from EIS on multiple MMS spacecraft.", "bottleneck as bn nanmean = bn.nanmean except ImportError: nanmean = np.nanmean from pytplot", "# find where the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux =", "if not isinstance(species, list): species = [species] if not isinstance(datatype, list): datatype =", "probes[0] + '-' + probes[-1] else: if probes: probe_string = probes[0] else: print('No", "str if not isinstance(species, list): species = [species] if not isinstance(datatype, list): datatype", "if not isinstance(datatype, list): datatype = [datatype] out_vars = [] for species_id in", "as np # use nanmean from bottleneck if it's installed, otherwise use the", "for dtype in datatype: # retrieve: omni variables of species to determine #", "len(omni_times) energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size)", "import get_data, store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof',", "probes.append(name[3:4]) if len(probes) > 4: probes = probes[:-2] if len(probes) > 1: probe_string", "data_rate + '*' + dtype+'*' + _species + '*' + data_units + '*omni'+", "spin_nums is None: print('Error: Could not find EIS spin variable -- now ending", "# according 
to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords)", "starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0", "Could not find EIS spin variable -- now ending procedure.') return # find", "except ImportError: nanmean = np.nanmean from pytplot import get_data, store_data, options from ...utilities.tnames", "def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines omni-directional energy", "use nanmean from bottleneck if it's installed, otherwise use the numpy one #", "or electron (default: 'proton') Returns: Name of tplot variables created. ''' ## Thoughts", "new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True)", "= [] for name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes =", "is str if not isinstance(species, list): species = [species] if not isinstance(datatype, list):", "probes: probe_string = probes[0] else: print('No probes found from eis_sc_check tnames.') return allmms_prefix", "= 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve specific probe's data based on minimum time/energy # Note:", "ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni flux over all", "found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' +", "store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name = allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec,", "Combines omni-directional energy spectrogram variable from EIS on multiple MMS spacecraft. Parameters ----------", "return time_size = np.zeros(len(probes)) energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle dist", "ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' #", "6 (omni) telescopes for p, probe in enumerate(probes): # note: return from get_data", "https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 # note: there are also available 'spec_bins' values #print(pytplot.data_quants[omni_vars[p]].coords) #t, data, v", "spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not find EIS spin", "np.zeros(len(probes)) # Retrieve probe's pitch angle dist for all 6 (omni) telescopes for", "= probes[:-2] if len(probes) > 1: probe_string = probes[0] + '-' + probes[-1]", "SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!') return time_size", "temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)):", "import numpy as np # use nanmean from bottleneck if it's installed, otherwise", "datatype: # retrieve: omni variables of species to determine # of probes _species", "+ dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS", "np # use nanmean from bottleneck if it's installed, otherwise use the numpy", "_species.upper(), 'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums = 
get_data(prefix+'spin'+suffix) if spin_nums", "len(probes)]) omni_spec_data[:] = np.nan # time x energy omni_spec = np.empty([len(time_refprobe[0]), len(energy_refprobe[2])]) omni_spec[:]", "multiple MMS spacecraft. Parameters ---------- datatype: str 'extof', 'electroenergy', or 'phxtof' (default: 'extof')", "energy x spacecraft omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time", "allmms_prefix+_species+'_'+data_units+'_omni'+sp store_data(new_name, data={'x':spin_nums[0][spin_starts], 'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog',", "if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label =", "else: print('No probes found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate", "Note: I did not split these tuples as the namespace is reused, i.e.,", "the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error: Could not find", "STEPS TO USE # AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not", "datatype = [datatype] out_vars = [] for species_id in species: for dtype in", "# bottleneck nanmean is ~2.5x faster try: import bottleneck as bn nanmean =", "'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name, 'Colormap', 'jet') options(new_name, 'ztitle',", "useful for preserving original tplot var species: str species for calculation, e.g., proton,", "data, v = get_data(omni_vars[p]) omni_times, omni_data, omni_energies = get_data(omni_vars[p]) time_size[p] = len(omni_times) energy_size[p]", "the spin starts _, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start", "# Note: I did not split these 
tuples as the namespace is reused,", "import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): ''' Combines", "lowecase if data_units == 'flux': units_label = 'Intensity\\n[1/cm^2-sr-s-keV]' elif data_units == 'cps': units_label", "species = [species] if not isinstance(datatype, list): datatype = [datatype] out_vars = []", "'_' + dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME", "energy_size[p] = len(omni_energies) reftime_sc_loc = np.argmin(time_size) ref_sc_time_size = int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size", "try: import bottleneck as bn nanmean = bn.nanmean except ImportError: nanmean = np.nanmean", "allmms_prefix+_species+'_'+data_units+'_omni' store_data(new_name, data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec',", "probe's data based on minimum time/energy # Note: I did not split these", "rate, e.g., 'srvy' or 'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)]", "omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot variable omni_spec[np.isnan(omni_spec)] = 0. 
new_name", "grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] = temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] =", "from ...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ):", "= int(min(time_size)) refenergy_sc_loc = np.argmin(energy_size) ref_sc_energy_size = int(min(energy_size)) prefix = 'mms'+probes[reftime_sc_loc]+'_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_' # Retrieve", "np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)):", "split these tuples as the namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc])", "nanmean = bn.nanmean except ImportError: nanmean = np.nanmean from pytplot import get_data, store_data,", "or 'brst' (default: 'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired", "it's installed, otherwise use the numpy one # bottleneck nanmean is ~2.5x faster", "tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars: print('No EIS '+dtype+'data loaded!') return time_size = np.zeros(len(probes)) energy_size", "return from get_data here is (times, data, v) # according to https://github.com/MAVENSDC/PyTplot/blob/ec87591521e84bae8d81caccaf64fc2a5785186f/pytplot/get_data.py#L66 #", "'Energy [keV]'])) out_vars.append(new_name) # Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is", "range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store new tplot", "= 'CountRate\\n[counts/s]' elif data_units == 'counts': units_label = 'Counts\\n[counts]' #assert type(datatype) is str", "## - Ensure arguments passed to modules are of 
lowecase if data_units ==", "installed, otherwise use the numpy one # bottleneck nanmean is ~2.5x faster try:", "np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:], axis=0)", "Name of tplot variables created. ''' ## Thoughts for extensions: ## - Ensure", "flux over all spacecraft and define common energy grid for pp in range(len(omni_vars)):", "loaded data; useful for preserving original tplot var species: str species for calculation,", "Retrieve probe's pitch angle dist for all 6 (omni) telescopes for p, probe", "original tplot var species: str species for calculation, e.g., proton, oxygen, alpha or", "probe_string = probes[0] else: print('No probes found from eis_sc_check tnames.') return allmms_prefix =", "'_' + level + '_' + dtype + '_' # DETERMINE SPACECRAFT WITH", "= nanmean(energy_data[ee,:], axis=0) # Average omni flux over all spacecraft for tt in", "data={'x':time_refprobe[0], 'y':omni_spec, 'v':energy_refprobe[2]}) options(new_name, 'ylog', True) options(new_name, 'zlog', True) options(new_name, 'spec', True) options(new_name,", "probes[:-2] if len(probes) > 1: probe_string = probes[0] + '-' + probes[-1] else:", "'Colormap', 'jet') options(new_name, 'ztitle', units_label) options(new_name, 'ytitle', ' \\\\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]']))", "(default: 'extof') data_rate: str instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy')", "units for data, e.g., 'flux' or 'cps' (default: 'flux') suffix: str suffix of", "energy_size = np.zeros(len(probes)) # Retrieve probe's pitch angle dist for all 6 (omni)", "= np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for spin_idx in range(len(spin_starts)): spin_sum_flux[spin_idx,:] = nanmean(omni_spec[current_start:spin_starts[spin_idx],:],", "(default: 'proton') Returns: Name of tplot variables created. 
''' ## Thoughts for extensions:", "+ '_' + dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF", "'y':spin_sum_flux, 'v':energy_refprobe[2]}) options(new_name, 'spec', True) options(new_name, 'zlog', True) options(new_name, 'ylog', True) options(new_name, 'spec',", "+ level + '_' + dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST", "created. ''' ## Thoughts for extensions: ## - Ensure arguments passed to modules", "list): datatype = [datatype] out_vars = [] for species_id in species: for dtype", "is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) # time x", "None: print('Error: Could not find EIS spin variable -- now ending procedure.') return", "isinstance(datatype, list): datatype = [datatype] out_vars = [] for species_id in species: for", "_, spin_starts = np.unique(spin_nums[1], return_index=True) spin_sum_flux = np.zeros([len(spin_starts), len(omni_spec[0,:])]) current_start = 0 for", "= temp_data[2][0:len(common_energy)] omni_spec_data[0:ref_sc_time_size,:,pp] = temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0)", "faster try: import bottleneck as bn nanmean = bn.nanmean except ImportError: nanmean =", "for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) #", "tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] = nanmean(omni_spec_data[tt,ee,:], axis=0) # store", "len(probes) > 1: probe_string = probes[0] + '-' + probes[-1] else: if probes:", "Average omni flux over all spacecraft for tt in range(len(time_refprobe[0])): for ee in", "omni_spec_data = np.empty([len(time_refprobe[0]), len(energy_refprobe[2]), len(probes)]) omni_spec_data[:] = np.nan # time x energy omni_spec", "instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy') level: 
str data level", "= 'electron' eis_sc_check = tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species", "dist for all 6 (omni) telescopes for p, probe in enumerate(probes): # note:", "out_vars.append(new_name) # Spin-average the data spin_nums = get_data(prefix+'spin'+suffix) if spin_nums is None: print('Error:", "over all spacecraft for tt in range(len(time_refprobe[0])): for ee in range(len(energy_refprobe[2])): omni_spec[tt,ee] =", "> 4: probes = probes[:-2] if len(probes) > 1: probe_string = probes[0] +", "# Retrieve probe's pitch angle dist for all 6 (omni) telescopes for p,", "= temp_data[1][0:ref_sc_time_size,0:len(common_energy)] for ee in range(len(common_energy)): common_energy[ee] = nanmean(energy_data[ee,:], axis=0) # Average omni", "specific probe's data based on minimum time/energy # Note: I did not split", "'srvy') level: str data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data,", "the namespace is reused, i.e., \"_refprobe\" time_refprobe = get_data(omni_vars[reftime_sc_loc]) energy_refprobe = get_data(omni_vars[refenergy_sc_loc]) #", "return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level + '_' +", "probes = [] for name in eis_sc_check: probes.append(name[3:4]) if len(probes) > 4: probes", "p, probe in enumerate(probes): # note: return from get_data here is (times, data,", "preserving original tplot var species: str species for calculation, e.g., proton, oxygen, alpha", "tnames('mms*eis*' + data_rate + '*' + dtype+'*' + _species + '*' + data_units", "bn.nanmean except ImportError: nanmean = np.nanmean from pytplot import get_data, store_data, options from", "np.nanmean from pytplot import get_data, store_data, options from ...utilities.tnames import tnames def mms_eis_spec_combine_sc(", "for calculation, e.g., proton, oxygen, alpha or electron (default: 'proton') Returns: Name of", "define common energy grid for pp in range(len(omni_vars)): temp_data = get_data(omni_vars[pp]) energy_data[:,pp] =", 
"data level ['l1a','l1b','l2pre','l2' (default)] data_units: str desired units for data, e.g., 'flux' or", "...utilities.tnames import tnames def mms_eis_spec_combine_sc( species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix='', ): '''", "suffix) # process multiple probes probes = [] for name in eis_sc_check: probes.append(name[3:4])", "len(probes) > 4: probes = probes[:-2] if len(probes) > 1: probe_string = probes[0]", "# note: return from get_data here is (times, data, v) # according to", "TO USE # AS A REFERENCE SPACECRAFT omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix) if not omni_vars:", "out_vars = [] for species_id in species: for dtype in datatype: # retrieve:", "dtype + '_' # DETERMINE SPACECRAFT WITH SMALLEST NUMBER OF TIME STEPS TO", "probes found from eis_sc_check tnames.') return allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_'", "of probes _species = species_id if dtype == 'electronenergy': _species = 'electron' eis_sc_check" ]
[]
[ "% 7 # some commands are different between mysql and sqlite is_mysql =", "total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user =", "HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name():", "server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry", "HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids):", "ValueError as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not", "get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg =", "data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"]) for x", "from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def", "= round((now - midnight).seconds / 60) # check if we already have data", "since midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0,", "the past 30 days), group them by time, and include # the first", "except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x", "+ 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) &", "def usage_data(hall_no, year, month, day): # turn date info into a date object", "date.day) output[\"rooms\"].append(hall_data) return jsonify(output) 
@app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError:", "{x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year,", "# turn date info into a date object # find start range by", "get_data) def save_data(): \"\"\"Retrieves current laundry info and saves it into the database.\"\"\"", "dow is sunday = 0 dow = (now.weekday() + 1) % 7 tmw", "request from pytz import timezone from requests.exceptions import HTTPError from sqlalchemy import Integer,", "for k in range(27)} for x in data: hour = int(x[\"time\"]) # if", "get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for x in", "return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year,", "user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids =", "@app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall", "if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"])", "est = timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no,", "hall id passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is", "week (if today is tuesday, get all the tuesdays # in the past", "total_washers = sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item", "account_id = g.account.id if g.account else None for room_id in 
room_ids: laundry_preference =", "all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The laundry api is", "We hope this will be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td", "{k: 0 for k in range(27)} dryer_total = {k: 0 for k in", "= LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\",", "dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | (", "laundry server is currently not updating. We hope this will be fixed shortly.\"", "on the day # of week (if today is tuesday, get all the", "room_ids = [int(x) for x in room_ids.split(\",\")] account_id = g.account.id if g.account else", "range(27)} for x in data: hour = int(x[\"time\"]) # if the value is", "= {k: 0 for k in range(27)} for x in data: hour =", "func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date >= start) ) )", "# delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in", "jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid", "hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour]", "@app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The", ") data = [x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x", "in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, 
date.month,", "midnight).seconds / 60) # check if we already have data for this minute", "True, \"error_msg\": None} else: error_msg = \"Penn's laundry server is currently not updating.", "room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"})", "now - datetime.timedelta(days=30) # get the current day of the week for today", "room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] )", "total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create()", "data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time", "time, and include # the first 2 hours of the next day data", "min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x])", "have data for this minute # if we do, skip with app.app_context(): if", "room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try:", "\"time\") .all() ) data = [x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"])", "laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date", "HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def safe_division(a, b): return", "next day data = ( 
sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql", "is_mysql = sqldb.engine.name == \"mysql\" # get the laundry information for today based", "hours if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] +=", "washer_total[hour] += 1 dryer_total[hour] += 1 dates = [x[\"date\"] for x in data]", "} @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def get_data(): return usage_data(hall_no, year,", "sqlalchemy import Integer, cast, exists, func from server import app, sqldb from server.auth", "the number of minutes since midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight", "days), group them by time, and include # the first 2 hours of", "dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day):", "python dow is monday = 0, while sql dow is sunday = 0", "in laundry.hall_id_list} data = laundry.all_status() for name, room in data.items(): id = ids[name]", "b): return round(a / float(b), 3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\")", "the value is for tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday(): hour", "in hall_ids.split(\",\")] output = {\"rooms\": []} for hall in halls: hall_data = laundry.hall_status(hall)", "= sqldb.engine.name == \"mysql\" # get the laundry information for today based on", "import calendar import datetime from flask import g, jsonify, request from pytz import", "{k: 0 for k in range(27)} for x in data: hour = int(x[\"time\"])", "two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return", "(hall_no, year, month, day), td, get_data) def save_data(): 
\"\"\"Retrieves current laundry info and", "specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x", "day), td, get_data) def save_data(): \"\"\"Retrieves current laundry info and saves it into", "(dow + 1) % 7 # some commands are different between mysql and", "td, get_data) def save_data(): \"\"\"Retrieves current laundry info and saves it into the", "# get the laundry information for today based on the day # of", "info into a date object # find start range by subtracting 30 days", "except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def", "in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"]) for", "@app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def get_data(): return usage_data(hall_no, year, month,", "in data] all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points = {k: 0", "unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\":", "int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates = [x[\"date\"] for x in", "= 0 dow = (now.weekday() + 1) % 7 tmw = (dow +", "import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try:", "now.month, now.day) def usage_data(hall_no, year, month, day): # turn date info into a", "str(tmw) ) ) ) & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date,", "% (hall_no, year, month, day), td, get_data) def save_data(): \"\"\"Retrieves current laundry info", 
"@app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret)", ") | ( (LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw", "in data: hour = int(x[\"time\"]) # if the value is for tomorrow, add", "is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est)", "for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def", "save_laundry_preferences(): try: user = User.get_or_create() except ValueError as e: return jsonify({\"success\": False, \"error\":", "as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids:", ".order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for x in data] all_dryers =", "laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\",", "[now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"),", "account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences():", "methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except ValueError as e: return", "def 
get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg", "include # the first 2 hours of the next day data = (", "User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()})", "preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")] account_id =", ") & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() )", "date info into a date object # find start range by subtracting 30", "room in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers", "7 # some commands are different between mysql and sqlite is_mysql = sqldb.engine.name", "get all the tuesdays # in the past 30 days), group them by", "\"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers,", "len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x])", "- 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else func.strftime(\"%w\",", "a dict for hall name -> id ids = {x[\"hall_name\"]: x[\"id\"] for x", "None} else: error_msg = \"Penn's laundry server is currently not updating. 
We hope", "now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time = round((now - midnight).seconds /", "= datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month, day): #", "in the past 30 days), group them by time, and include # the", "= [x[\"date\"] for x in data] if not dates: dates = [now] return", "datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data) def save_data(): \"\"\"Retrieves", "exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar(): return # make a dict", "x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time, room=id,", "month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td,", "# python dow is monday = 0, while sql dow is sunday =", ".all() ) data = [x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for", "dates = [x[\"date\"] for x in data] if not dates: dates = [now]", "\"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\",", "[x[\"date\"] for x in data] if not dates: dates = [now] return {", "sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot(", "for x in data] if not dates: dates = [now] return { \"hall_name\":", "mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\" # get the laundry information", "return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data) def save_data(): \"\"\"Retrieves current", "minute=0, second=0, microsecond=0) date = now.date() time = round((now - midnight).seconds / 60)", "jsonify, request from pytz import timezone from requests.exceptions import HTTPError from sqlalchemy import", "% 7 tmw = (dow + 1) % 7 # some 
commands are", "timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date()", "user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")] account_id = g.account.id if", "import auth from server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot, User from", "in data] washer_points = {k: 0 for k in range(27)} dryer_points = {k:", "is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter(", "all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"]) for x in", "2 hours of the next day data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time", "[x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers", "= now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time = round((now - midnight).seconds", "dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\":", "the week for today and tomorrow # python dow is monday = 0,", "\"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in", "= ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for", "laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return 
jsonify({\"error\": \"Invalid hall id passed to server.\"})", "is currently unavailable.\"}) def safe_division(a, b): return round(a / float(b), 3) if b", "{k: 0 for k in range(27)} dryer_points = {k: 0 for k in", "room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\", \"running\",", "k in range(27)} dryer_points = {k: 0 for k in range(27)} washer_total =", "= {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status() for name, room", "it into the database.\"\"\" # get the number of minutes since midnight est", "== dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) |", "( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60,", "for x in data] washer_points = {k: 0 for k in range(27)} dryer_points", "second=0, microsecond=0) date = now.date() time = round((now - midnight).seconds / 60) #", "get the current day of the week for today and tomorrow # python", "get the laundry information for today based on the day # of week", "| ( (LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw +", "and include # the first 2 hours of the next day data =", "= timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date =", "currently not updating. 
We hope this will be fixed shortly.\" return {\"is_working\": False,", "x in data] washer_points = {k: 0 for k in range(27)} dryer_points =", "if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now", "washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates", "( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw)", "be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td = datetime.timedelta(hours=1) return cached_route(\"laundry:working\",", "str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms", "the laundry information for today based on the day # of week (if", "get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id)", "0 for k in range(27)} for x in data: hour = int(x[\"time\"]) #", "minutes since midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0,", "hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data)", "range(27)} dryer_points = {k: 0 for k in range(27)} washer_total = {k: 0", "month, day): def get_data(): return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return", "the next day data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if", "+= 1 dates = [x[\"date\"] for x in data] if not dates: dates", "[x.room_id for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def", "= [int(x) for x in 
hall_ids.split(\",\")] output = {\"rooms\": []} for hall in", "sqlite is_mysql = sqldb.engine.name == \"mysql\" # get the laundry information for today", "calendar import datetime from flask import g, jsonify, request from pytz import timezone", "return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete old preferences for user", "\"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x in [\"open\", \"running\",", "start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for", "{ \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers),", "(LaundrySnapshot.time == time)) ).scalar(): return # make a dict for hall name ->", "x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if", "sqldb from server.auth import auth from server.base import cached_route from server.models import LaundryPreference,", "the tuesdays # in the past 30 days), group them by time, and", "now.date() time = round((now - midnight).seconds / 60) # check if we already", "x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status() for name, room in data.items():", "monday = 0, while sql dow is sunday = 0 dow = (now.weekday()", "if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar(): return # make", "cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), 
func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room", "= (now.weekday() + 1) % 7 tmw = (dow + 1) % 7", "api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date =", "# if we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) &", "est = timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date", "import g, jsonify, request from pytz import timezone from requests.exceptions import HTTPError from", "add 24 hours if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"])", "dow is monday = 0, while sql dow is sunday = 0 dow", "except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def", "jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return", "laundry info and saves it into the database.\"\"\" # get the number of", "usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def", "else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no,", "halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day)", "day): def get_data(): return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\"", "commands are different between mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\" #", "(LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == 
dow + 1 if is_mysql", "make a dict for hall name -> id ids = {x[\"hall_name\"]: x[\"id\"] for", "jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences] return", "item = LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit()", "round((now - midnight).seconds / 60) # check if we already have data for", "if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's laundry server", "/ float(b), 3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est", "for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall,", "ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError: return jsonify({\"error\":", "between mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\" # get the laundry", "sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar(): return # make a", "turn date info into a date object # find start range by subtracting", "date = now.date() time = round((now - midnight).seconds / 60) # check if", "in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\":", "{\"rooms\": []} for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"]", "by subtracting 30 days now = datetime.date(year, month, day) start = now -", "= ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time /", "def id_to_name(): try: return jsonify({\"halls\": 
laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api", "not updating. We hope this will be fixed shortly.\" return {\"is_working\": False, \"error_msg\":", "str(dow) ) | ( (LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) ==", "# make a dict for hall name -> id ids = {x[\"hall_name\"]: x[\"id\"]", "will be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td = datetime.timedelta(hours=1) return", "== \"mysql\" # get the laundry information for today based on the day", "= [x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data]", "day) start = now - datetime.timedelta(days=30) # get the current day of the", "output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return", "microsecond=0) date = now.date() time = round((now - midnight).seconds / 60) # check", "import cached_route from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\",", "sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user =", "of the week for today and tomorrow # python dow is monday =", "the current day of the week for today and tomorrow # python dow", "1 dates = [x[\"date\"] for x in data] if not dates: dates =", "return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month, day): # turn date", "days now = datetime.date(year, month, day) start = now - datetime.timedelta(days=30) # get", "value is for tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday(): hour +=", "x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } 
@app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\",", "& (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data", "laundry.hall_id_list} data = laundry.all_status() for name, room in data.items(): id = ids[name] dryers", "import datetime from flask import g, jsonify, request from pytz import timezone from", "tomorrow # python dow is monday = 0, while sql dow is sunday", "already have data for this minute # if we do, skip with app.app_context():", "date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True)", "\"\"\"Retrieves current laundry info and saves it into the database.\"\"\" # get the", "washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points},", "return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences]", "old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")] account_id", "dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user", "for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time,", "currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return", "hall_ids.split(\",\")] output = {\"rooms\": []} for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"]", 
"\"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for x in data] all_dryers", "import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return", "methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id", "data: hour = int(x[\"time\"]) # if the value is for tomorrow, add 24", "the day # of week (if today is tuesday, get all the tuesdays", "hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def", "HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id,", "== hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else", "name, room in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"]", "= 0, while sql dow is sunday = 0 dow = (now.weekday() +", "[int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points", "is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError:", "+= 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] +=", "time)) ).scalar(): return # make a dict for hall name -> id ids", "if we already have data for this minute # if we do, skip", "\"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\")", "if g.account else None for 
room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id)", "tuesdays # in the past 30 days), group them by time, and include", "we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time ==", "{x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status() for name, room in", "= room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]]", "into the database.\"\"\" # get the number of minutes since midnight est =", "information for today based on the day # of week (if today is", "cached_route from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"])", "# find start range by subtracting 30 days now = datetime.date(year, month, day)", "range by subtracting 30 days now = datetime.date(year, month, day) start = now", "( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"),", "return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\":", "jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete()", "for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")] account_id = g.account.id", "else None for room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) 
sqldb.session.commit()", "HTTPError from sqlalchemy import Integer, cast, exists, func from server import app, sqldb", "sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user", "calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x:", "user = User.get_or_create() except ValueError as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids", "laundry information for today based on the day # of week (if today", "180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else", "== str(dow) ) | ( (LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date)", "preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return", "def safe_division(a, b): return round(a / float(b), 3) if b > 0 else", "& ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) ==", "usage_data(hall_no, year, month, day): # turn date info into a date object #", "sunday = 0 dow = (now.weekday() + 1) % 7 tmw = (dow", "for x in hall_ids.split(\",\")] output = {\"rooms\": []} for hall in halls: hall_data", "LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date,", "def usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day)", "else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), 
func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( (", "in range(27)} dryer_points = {k: 0 for k in range(27)} washer_total = {k:", "get the number of minutes since midnight est = timezone(\"EST\") now = datetime.datetime.now(est)", "is sunday = 0 dow = (now.weekday() + 1) % 7 tmw =", "is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <= 180 -", "in range(27)} for x in data: hour = int(x[\"time\"]) # if the value", "laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\":", "func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date)", "datetime.timedelta(days=30) # get the current day of the week for today and tomorrow", "3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\")", "washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def", "get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's laundry", "passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"})", "jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry", "return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return 
jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\":", "\"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\":", "api is currently unavailable.\"}) def safe_division(a, b): return round(a / float(b), 3) if", "is monday = 0, while sql dow is sunday = 0 dow =", "def save_data(): \"\"\"Retrieves current laundry info and saves it into the database.\"\"\" #", "for today and tomorrow # python dow is monday = 0, while sql", "= {\"rooms\": []} for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall", "server import app, sqldb from server.auth import auth from server.base import cached_route from", "for this minute # if we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date", "g.account else None for room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference)", "= {k: 0 for k in range(27)} washer_total = {k: 0 for k", "def usage(hall_no, year, month, day): def get_data(): return usage_data(hall_no, year, month, day) td", ".group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for x in data]", "for k in range(27)} washer_total = {k: 0 for k in range(27)} dryer_total", "sqldb.engine.name == \"mysql\" # get the laundry information for today based on the", "in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working():", "if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <= 180", "sqldb.session.query( 
LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\")", "except ValueError as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if", "unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls =", "different between mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\" # get the", "try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently", "jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False,", "usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month, day): # turn date info", "hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to", "\"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret", "currently unavailable.\"}) def safe_division(a, b): return round(a / float(b), 3) if b >", "def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The laundry api", "= {k: 0 for k in range(27)} dryer_points = {k: 0 for k", "in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"])", "and saves it into the database.\"\"\" # get the number of minutes since", "return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return 
{\"is_working\":", "def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's", "ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in", "object # find start range by subtracting 30 days now = datetime.date(year, month,", "unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return", "for tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour]", "= hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"])", "laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret =", "else: error_msg = \"Penn's laundry server is currently not updating. 
We hope this", ") .filter( ( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow +", "x in hall_ids.split(\",\")] output = {\"rooms\": []} for hall in halls: hall_data =", "def get_data(): return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" %", "1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date", "of week (if today is tuesday, get all the tuesdays # in the", "@app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []})", "while sql dow is sunday = 0 dow = (now.weekday() + 1) %", "room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\":", "+ 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time", "requests.exceptions import HTTPError from sqlalchemy import Integer, cast, exists, func from server import", "auth from server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata", "\"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\":", "User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for", "cast, exists, func from server import app, sqldb from server.auth import auth from", "return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The", "to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\",", "x in 
data: hour = int(x[\"time\"]) # if the value is for tomorrow,", "get_data(): return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no,", "if we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time", "of the next day data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\")", "if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), )", "func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) )", "x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour]", "group them by time, and include # the first 2 hours of the", "import HTTPError from sqlalchemy import Integer, cast, exists, func from server import app,", "est = timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")]", "return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"})", "LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"])", "this will be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td = datetime.timedelta(hours=1)", "0 for k in range(27)} washer_total = {k: 0 for k in range(27)}", "\"total_number_of_washers\": 
safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x:", "usage(hall_no, year, month, day): def get_data(): return usage_data(hall_no, year, month, day) td =", "1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date)", "if the value is for tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday():", "app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar(): return #", "jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except", "sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum(", "= User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id", "Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) &", "from server import app, sqldb from server.auth import auth from server.base import cached_route", ".filter( ( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow + 1", "[int(x[\"all_total_washers\"]) for x in data] washer_points = {k: 0 for k in range(27)}", "a date object # find start range by subtracting 30 days now =", "LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")] account_id = g.account.id if g.account", "60, Integer).label(\"time\") ), 
func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no)", "is for tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday(): hour += 24", "hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\",", "laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)),", "usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return", "dryer_total = {k: 0 for k in range(27)} for x in data: hour", "True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except ValueError:", "Integer, cast, exists, func from server import app, sqldb from server.auth import auth", "30 days now = datetime.date(year, month, day) start = now - datetime.timedelta(days=30) #", "laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\":", "hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else func.strftime(\"%w\",", 
"tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) )", "( ( func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) ==", "request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete", "return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except", "now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month, day):", "ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status() for name,", "safe_division(a, b): return round(a / float(b), 3) if b > 0 else 0", "= datetime.date(year, month, day) start = now - datetime.timedelta(days=30) # get the current", "currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls", "halls = [int(x) for x in hall_ids.split(\",\")] output = {\"rooms\": []} for hall", "0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est) return", "start = now - datetime.timedelta(days=30) # get the current day of the week", "month, day): # turn date info into a date object # find start", "for today based on the day # of week (if today is tuesday,", "\"Penn's laundry server is currently not updating. 
We hope this will be fixed", "import app, sqldb from server.auth import auth from server.base import cached_route from server.models", "safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points},", "laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's laundry server is", "number of minutes since midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight =", "them by time, and include # the first 2 hours of the next", "[laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to", "# some commands are different between mysql and sqlite is_mysql = sqldb.engine.name ==", "currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]}", "data] all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points = {k: 0 for", "now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time", "from requests.exceptions import HTTPError from sqlalchemy import Integer, cast, exists, func from server", "today based on the day # of week (if today is tuesday, get", "washer_total = {k: 0 for k in range(27)} dryer_total = {k: 0 for", "are different between mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\" # get", "\"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]]", "0 for k in range(27)} dryer_total = {k: 0 for k in range(27)}", "info and saves it into the database.\"\"\" # get the number of minutes", "{\"halls\": 
[laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall id passed", "func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <= 180 - 1) &", "\"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\": \"No", "LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"])", "from server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import", "= now.date() time = round((now - midnight).seconds / 60) # check if we", "return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\":", "\"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no,", "= LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\",", "import Integer, cast, exists, func from server import app, sqldb from server.auth import", "safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month,", "( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow + 1 if", "LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def", "try: return jsonify({\"halls\": 
laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently", "LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\":", "[]}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences] return jsonify({\"rooms\":", "# get the number of minutes since midnight est = timezone(\"EST\") now =", "hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\":", "minute # if we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date)", "all the tuesdays # in the past 30 days), group them by time,", "[room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date,", "methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences", "month, day), td, get_data) def save_data(): \"\"\"Retrieves current laundry info and saves it", "else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <= 180 - 1)", "time = round((now - midnight).seconds / 60) # check if we already have", ") sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except", "@app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x)", "@app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month,", "24 washer_points[hour] += int(x[\"all_washers\"]) 
dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1", "== str(tmw) ) ) ) & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\")", "methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The laundry", "we already have data for this minute # if we do, skip with", "return round(a / float(b), 3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def", "hours of the next day data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time /", "try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"})", "day # of week (if today is tuesday, get all the tuesdays #", "day data = ( sqldb.session.query( LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else", "for x in laundry.hall_id_list} data = laundry.all_status() for name, room in data.items(): id", "\"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, )", "day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data)", "data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum(", "room_ids = [x.room_id for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def", "= [x.room_id for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status():", "dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"),", "x 
in data] if not dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no],", "to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall", "now.year, now.month, now.day) def usage_data(hall_no, year, month, day): # turn date info into", "fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td = datetime.timedelta(hours=1) return cached_route(\"laundry:working\", td,", "pytz import timezone from requests.exceptions import HTTPError from sqlalchemy import Integer, cast, exists,", "+ 1) % 7 # some commands are different between mysql and sqlite", "= {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall id", "not dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()],", "is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date >= start)", "day of the week for today and tomorrow # python dow is monday", "LaundrySnapshot.date, ( func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ),", "jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError:", "laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output)", "> 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now = datetime.datetime.now(est)", "= room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\",", 
"dryer_total[hour] += 1 dates = [x[\"date\"] for x in data] if not dates:", "( (LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1", "for hall name -> id ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list}", "= sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers =", "datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month, day): # turn", "/ 60) # check if we already have data for this minute #", "\"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers,", "= \"Penn's laundry server is currently not updating. We hope this will be", "for k in range(27)} dryer_points = {k: 0 for k in range(27)} washer_total", "date = datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")] output = {\"rooms\":", "def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for x", "= request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) #", "= [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\":", "24 hours if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour]", "\"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return", "None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\":", "[\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item 
= LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers,", "app, sqldb from server.auth import auth from server.base import cached_route from server.models import", "date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except", "= [int(x) for x in room_ids.split(\",\")] account_id = g.account.id if g.account else None", "hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id):", "if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date >=", "= g.account.id if g.account else None for room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id,", "try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids", "this minute # if we do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date ==", "in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x in", "@app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None}", "= int(x[\"time\"]) # if the value is for tomorrow, add 24 hours if", "\"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers),", "laundry.all_status()}) except HTTPError: return 
jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"])", "= sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item =", "not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete old preferences", "= laundry.all_status() for name, room in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"]", "<= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if is_mysql", "dict for hall name -> id ids = {x[\"hall_name\"]: x[\"id\"] for x in", "timezone from requests.exceptions import HTTPError from sqlalchemy import Integer, cast, exists, func from", "\"Invalid hall id passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api", "in range(27)} washer_total = {k: 0 for k in range(27)} dryer_total = {k:", "preferences = LaundryPreference.query.filter_by(user_id=user.id) room_ids = [x.room_id for x in preferences] return jsonify({\"rooms\": room_ids})", ") ) ) & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\")", "func from server import app, sqldb from server.auth import auth from server.base import", "save_data(): \"\"\"Retrieves current laundry info and saves it into the database.\"\"\" # get", "func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) == dow", "today and tomorrow # python dow is monday = 0, while sql dow", "== time)) ).scalar(): return # make a dict for hall name -> id", "saves it into the database.\"\"\" # get the number of minutes since midnight", "safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x],", "[\"open\", \"running\", \"offline\", 
\"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x in [\"open\",", "hope this will be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td =", "methods=[\"GET\"]) def get_rooms(hall_ids): est = timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for", "skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar():", "time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def", "except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def", "from server.auth import auth from server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot,", "def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError:", "x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"])", "from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except", "dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def get_data(): return usage_data(hall_no,", "washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try:", "jsonify({\"halls\": laundry.all_status()}) except HTTPError: return 
jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\",", "year, month, day), td, get_data) def save_data(): \"\"\"Retrieves current laundry info and saves", "for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in data] all_washers =", "room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True,", "methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True, \"error_msg\": None} else:", "func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) & ( (", ") ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for x", "1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <=", "in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def get_data(): return", "(now.weekday() + 1) % 7 tmw = (dow + 1) % 7 #", "id ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status() for", "date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try: return jsonify(laundry.hall_status(hall_id))", "- datetime.timedelta(days=30) # get the current day of the week for today and", "data = laundry.all_status() for name, room in data.items(): id = ids[name] dryers =", "!= now.weekday(): hour += 24 washer_points[hour] += 
int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] +=", "is tuesday, get all the tuesdays # in the past 30 days), group", "room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None})", "datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")] output = {\"rooms\": []} for", "flask import g, jsonify, request from pytz import timezone from requests.exceptions import HTTPError", "year, month, day): # turn date info into a date object # find", "== tmw + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) )", "{k: 0 for k in range(27)} washer_total = {k: 0 for k in", "= {k: 0 for k in range(27)} dryer_total = {k: 0 for k", "and sqlite is_mysql = sqldb.engine.name == \"mysql\" # get the laundry information for", "# in the past 30 days), group them by time, and include #", "b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now =", "# the first 2 hours of the next day data = ( sqldb.session.query(", "\"mysql\" # get the laundry information for today based on the day #", "60) # check if we already have data for this minute # if", "return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try:", "delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for x in room_ids.split(\",\")]", "[int(x) for x in room_ids.split(\",\")] account_id = g.account.id if g.account else None for", "g, jsonify, request from pytz import timezone from requests.exceptions import HTTPError from sqlalchemy", "for k in range(27)} dryer_total = {k: 0 for k in range(27)} for", "(LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") 
.order_by(LaundrySnapshot.date, \"time\") .all() ) data =", "if not room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete old", "# of week (if today is tuesday, get all the tuesdays # in", "current day of the week for today and tomorrow # python dow is", "year, month, day): def get_data(): return usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15)", "with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time)) ).scalar(): return", "= User.get_or_create() except ValueError as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids =", "dow = (now.weekday() + 1) % 7 tmw = (dow + 1) %", "= [int(x[\"all_total_washers\"]) for x in data] washer_points = {k: 0 for k in", "subtracting 30 days now = datetime.date(year, month, day) start = now - datetime.timedelta(days=30)", "(LaundrySnapshot.time <= 180 - 1) & ( func.dayofweek(LaundrySnapshot.date) == tmw + 1 if", "now.weekday(): hour += 24 washer_points[hour] += int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1", "server.auth import auth from server.base import cached_route from server.models import LaundryPreference, LaundrySnapshot, User", "), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) & (", "based on the day # of week (if today is tuesday, get all", "dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates = [x[\"date\"] for", "shortly.\" return {\"is_working\": False, \"error_msg\": error_msg} td = datetime.timedelta(hours=1) return cached_route(\"laundry:working\", td, get_data)", "methods=[\"GET\"]) def usage(hall_no, year, month, day): def 
get_data(): return usage_data(hall_no, year, month, day)", "for x in data] all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points =", "+ 1) % 7 tmw = (dow + 1) % 7 # some", "return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est", "hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year,", "hall name -> id ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data", "k in range(27)} for x in data: hour = int(x[\"time\"]) # if the", "/ 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"),", "sql dow is sunday = 0 dow = (now.weekday() + 1) % 7", "in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers =", "func.floor(LaundrySnapshot.time / 60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"),", "range(27)} dryer_total = {k: 0 for k in range(27)} for x in data:", "datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time = round((now", "laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list})", "@auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except ValueError as e: return jsonify({\"success\":", "\"The laundry api is currently unavailable.\"}) def safe_division(a, b): return round(a / float(b),", "def 
hall(hall_id): try: return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id passed", "/ 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room ==", "to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\",", "from sqlalchemy import Integer, cast, exists, func from server import app, sqldb from", "return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"})", "server.models import LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls():", "# get the current day of the week for today and tomorrow #", "= timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year,", "room_ids.split(\",\")] account_id = g.account.id if g.account else None for room_id in room_ids: laundry_preference", "- midnight).seconds / 60) # check if we already have data for this", "in room_ids.split(\",\")] account_id = g.account.id if g.account else None for room_id in room_ids:", "\"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for", "jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data(): if laundry.check_is_working(): return {\"is_working\": True,", "= 
LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"])", "washer_points = {k: 0 for k in range(27)} dryer_points = {k: 0 for", "# if the value is for tomorrow, add 24 hours if x[\"date\"].weekday() !=", "(if today is tuesday, get all the tuesdays # in the past 30", "except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError: return", "sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except ValueError as", "database.\"\"\" # get the number of minutes since midnight est = timezone(\"EST\") now", "1) % 7 tmw = (dow + 1) % 7 # some commands", "data = [x._asdict() for x in data] all_dryers = [int(x[\"all_total_dryers\"]) for x in", "g.account.id if g.account else None for room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id,", "x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for x", "return # make a dict for hall name -> id ids = {x[\"hall_name\"]:", "x in data] all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points = {k:", "max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x", "for x in preferences] return jsonify({\"rooms\": room_ids}) @app.route(\"/laundry/status\", methods=[\"GET\"]) def get_laundry_status(): def get_data():", "id passed to server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently", "td = datetime.timedelta(minutes=15) return 
cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data) def", "func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"), ) .filter( ( (LaundrySnapshot.room == hall_no) & ( ( func.dayofweek(LaundrySnapshot.date) ==", "return jsonify(laundry.hall_status(hall_id)) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except", "name -> id ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data =", "laundry.all_status() for name, room in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers", "tuesday, get all the tuesdays # in the past 30 days), group them", "else func.strftime(\"%w\", LaundrySnapshot.date) == str(tmw) ) ) ) & (LaundrySnapshot.date >= start) )", "= now - datetime.timedelta(days=30) # get the current day of the week for", "find start range by subtracting 30 days now = datetime.date(year, month, day) start", "= [int(x[\"all_total_dryers\"]) for x in data] all_washers = [int(x[\"all_total_washers\"]) for x in data]", "\"error\": \"No rooms specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids =", "& ( ( func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date)", "timezone(\"EST\") now = datetime.datetime.now(est) return usage(hall_no, now.year, now.month, now.day) def usage_data(hall_no, year, month,", "# check if we already have data for this minute # if we", "datetime from flask import g, jsonify, request from pytz import timezone from requests.exceptions", "\"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\": calendar.day_name[now.weekday()], \"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), 
len(all_dryers)),", "60).label(\"time\") if is_mysql else cast(LaundrySnapshot.time / 60, Integer).label(\"time\") ), func.avg(LaundrySnapshot.washers).label(\"all_washers\"), func.avg(LaundrySnapshot.dryers).label(\"all_dryers\"), func.avg(LaundrySnapshot.total_washers).label(\"all_total_washers\"), func.avg(LaundrySnapshot.total_dryers).label(\"all_total_dryers\"),", "timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")] output =", "by time, and include # the first 2 hours of the next day", "tomorrow, add 24 hours if x[\"date\"].weekday() != now.weekday(): hour += 24 washer_points[hour] +=", "= datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time =", "0 for k in range(27)} dryer_points = {k: 0 for k in range(27)}", "@app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except ValueError as e:", "except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def safe_division(a, b):", "len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\":", "now = datetime.date(year, month, day) start = now - datetime.timedelta(days=30) # get the", "def save_laundry_preferences(): try: user = User.get_or_create() except ValueError as e: return jsonify({\"success\": False,", "output = {\"rooms\": []} for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] =", "\"error_msg\": None} else: error_msg = \"Penn's laundry server is currently not updating. 
We", "id = ids[name] dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x]", "jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try:", "{\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's laundry server is currently not", "round(a / float(b), 3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no):", "x in room_ids.split(\",\")] account_id = g.account.id if g.account else None for room_id in", "False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return jsonify({\"success\": False, \"error\":", "into a date object # find start range by subtracting 30 days now", "try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid", "for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x] for", "= datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data) def save_data():", "hour = int(x[\"time\"]) # if the value is for tomorrow, add 24 hours", "methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id), laundry.hall_status(hall_id2)]} return jsonify(to_ret) except", "methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry", "None for room_id in room_ids: laundry_preference = LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return", "return {\"is_working\": True, \"error_msg\": None} else: error_msg = \"Penn's laundry server is currently", "7 tmw = (dow + 
1) % 7 # some commands are different", "jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/rooms/<hall_ids>\", methods=[\"GET\"]) def get_rooms(hall_ids): est =", "return jsonify({\"success\": True, \"error\": None}) @app.route(\"/laundry/preferences\", methods=[\"GET\"]) def get_laundry_preferences(): try: user = User.get_or_create()", ").scalar(): return # make a dict for hall name -> id ids =", "is currently not updating. We hope this will be fixed shortly.\" return {\"is_working\":", "api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\":", "id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api is", "year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day),", "0, while sql dow is sunday = 0 dow = (now.weekday() + 1)", "+= int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates = [x[\"date\"] for x", "the first 2 hours of the next day data = ( sqldb.session.query( LaundrySnapshot.date,", "all_washers = [int(x[\"all_total_washers\"]) for x in data] washer_points = {k: 0 for k", "midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0)", "= timezone(\"EST\") date = datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")] output", "== date) & (LaundrySnapshot.time == time)) ).scalar(): return # make a dict for", "usage_data(hall_no, year, month, day) td = datetime.timedelta(minutes=15) return cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month,", "current laundry info and saves it into the database.\"\"\" # get the number", "week for today and tomorrow # python dow is monday = 0, while", "for room_id in room_ids: laundry_preference = 
LaundryPreference(user_id=user.id, account=account_id, room_id=room_id) sqldb.session.add(laundry_preference) sqldb.session.commit() return jsonify({\"success\":", "for x in data: hour = int(x[\"time\"]) # if the value is for", "( func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow)", "in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) item = LaundrySnapshot( date=date, time=time, room=id, washers=washers,", "= datetime.datetime.now(est) halls = [int(x) for x in hall_ids.split(\",\")] output = {\"rooms\": []}", "jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def safe_division(a, b): return round(a /", "dryer_points = {k: 0 for k in range(27)} washer_total = {k: 0 for", "False, \"error\": \"No rooms specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids", "jsonify({\"halls\": laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def", "exists, func from server import app, sqldb from server.auth import auth from server.base", ">= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict()", "int(x[\"time\"]) # if the value is for tomorrow, add 24 hours if x[\"date\"].weekday()", "try: user = User.get_or_create() except ValueError as e: return jsonify({\"success\": False, \"error\": str(e)})", "first 2 hours of the next day data = ( sqldb.session.query( LaundrySnapshot.date, (", "sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences(): try: user = User.get_or_create() except ValueError", "month, day) start = now - datetime.timedelta(days=30) # get the current day of", "0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est = timezone(\"EST\") now = 
datetime.datetime.now(est) return usage(hall_no, now.year,", "+= 1 dryer_total[hour] += 1 dates = [x[\"date\"] for x in data] if", ") item = LaundrySnapshot( date=date, time=time, room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item)", "data] if not dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no],", "date) & (LaundrySnapshot.time == time)) ).scalar(): return # make a dict for hall", "api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"]) def id_to_name(): try: return jsonify({\"halls\": laundry.hall_id_list}) except", "some commands are different between mysql and sqlite is_mysql = sqldb.engine.name == \"mysql\"", "0 dow = (now.weekday() + 1) % 7 tmw = (dow + 1)", "server is currently not updating. We hope this will be fixed shortly.\" return", "past 30 days), group them by time, and include # the first 2", "unavailable.\"}) def safe_division(a, b): return round(a / float(b), 3) if b > 0", "func.dayofweek(LaundrySnapshot.date) == dow + 1 if is_mysql else func.strftime(\"%w\", LaundrySnapshot.date) == str(dow) )", "server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/halls/ids\", methods=[\"GET\"])", "[]} for hall in halls: hall_data = laundry.hall_status(hall) hall_data[\"id\"] = hall hall_data[\"usage_data\"] =", "in range(27)} dryer_total = {k: 0 for k in range(27)} for x in", "safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in", "return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def safe_division(a, b): return round(a", "start range by subtracting 30 days now = datetime.date(year, month, day) start =", "@app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return 
jsonify({\"halls\": laundry.all_status()}) except HTTPError: return jsonify({\"error\": \"The", "laundry api is currently unavailable.\"}) def safe_division(a, b): return round(a / float(b), 3)", "tmw = (dow + 1) % 7 # some commands are different between", "do, skip with app.app_context(): if sqldb.session.query( exists().where((LaundrySnapshot.date == date) & (LaundrySnapshot.time == time))", "from flask import g, jsonify, request from pytz import timezone from requests.exceptions import", "LaundrySnapshot.date) == str(dow) ) | ( (LaundrySnapshot.time <= 180 - 1) & (", "e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\") if not room_ids: return", "LaundryPreference, LaundrySnapshot, User from server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return", "now.day) def usage_data(hall_no, year, month, day): # turn date info into a date", "{x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x", "updating. 
We hope this will be fixed shortly.\" return {\"is_working\": False, \"error_msg\": error_msg}", "float(b), 3) if b > 0 else 0 @app.route(\"/laundry/usage/<int:hall_no>\") def usage_shortcut(hall_no): est =", "[room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers = sum( [room[\"washers\"][x]", "room=id, washers=washers, dryers=dryers, total_washers=total_washers, total_dryers=total_dryers, ) sqldb.session.add(item) sqldb.session.commit() @app.route(\"/laundry/preferences\", methods=[\"POST\"]) @auth(nullable=True) def save_laundry_preferences():", "washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\",", "return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2):", "check if we already have data for this minute # if we do,", "x in laundry.hall_id_list} data = laundry.all_status() for name, room in data.items(): id =", "is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"]) def two_halls(hall_id, hall_id2): try: to_ret = {\"halls\": [laundry.hall_status(hall_id),", "day): # turn date info into a date object # find start range", ") .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all() ) data = [x._asdict() for x in", "the database.\"\"\" # get the number of minutes since midnight est = timezone(\"EST\")", "from pytz import timezone from requests.exceptions import HTTPError from sqlalchemy import Integer, cast,", "server.\"}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) @app.route(\"/laundry/hall/<int:hall_id>/<int:hall_id2>\", methods=[\"GET\"])", "data] washer_points = {k: 0 for k in range(27)} dryer_points = {k: 0", "today is tuesday, get all the tuesdays # in the past 30 days),", "k in 
range(27)} washer_total = {k: 0 for k in range(27)} dryer_total =", "data for this minute # if we do, skip with app.app_context(): if sqldb.session.query(", "room_ids: return jsonify({\"success\": False, \"error\": \"No rooms specified.\"}) # delete old preferences for", "import timezone from requests.exceptions import HTTPError from sqlalchemy import Integer, cast, exists, func", "k in range(27)} dryer_total = {k: 0 for k in range(27)} for x", "midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) date = now.date() time = round((now -", "[int(x) for x in hall_ids.split(\",\")] output = {\"rooms\": []} for hall in halls:", "def get_laundry_preferences(): try: user = User.get_or_create() except ValueError: return jsonify({\"rooms\": []}) preferences =", "User.get_or_create() except ValueError as e: return jsonify({\"success\": False, \"error\": str(e)}) room_ids = request.form.get(\"rooms\")", "laundry.hall_id_list}) except HTTPError: return jsonify({\"error\": \"The laundry api is currently unavailable.\"}) def safe_division(a,", "date object # find start range by subtracting 30 days now = datetime.date(year,", "x in dryer_points}, } @app.route(\"/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>\", methods=[\"GET\"]) def usage(hall_no, year, month, day): def get_data():", "+= int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates =", "for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for x in dryer_points}, }", "total_dryers = sum( [room[\"dryers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] ) total_washers", "of minutes since midnight est = timezone(\"EST\") now = datetime.datetime.now(est) midnight = now.replace(hour=0,", "\"No rooms specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x)", "ids[name] dryers = room[\"dryers\"][\"open\"] 
washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x", "dryers = room[\"dryers\"][\"open\"] washers = room[\"washers\"][\"open\"] total_dryers = sum( [room[\"dryers\"][x] for x in", "server.penndata import laundry @app.route(\"/laundry/halls\", methods=[\"GET\"]) def all_halls(): try: return jsonify({\"halls\": laundry.all_status()}) except HTTPError:", "= usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return jsonify(output) @app.route(\"/laundry/hall/<int:hall_id>\", methods=[\"GET\"]) def hall(hall_id): try:", "30 days), group them by time, and include # the first 2 hours", "-> id ids = {x[\"hall_name\"]: x[\"id\"] for x in laundry.hall_id_list} data = laundry.all_status()", "range(27)} washer_total = {k: 0 for k in range(27)} dryer_total = {k: 0", "for name, room in data.items(): id = ids[name] dryers = room[\"dryers\"][\"open\"] washers =", "rooms specified.\"}) # delete old preferences for user LaundryPreference.query.filter_by(user_id=user.id).delete() room_ids = [int(x) for", "error_msg = \"Penn's laundry server is currently not updating. 
We hope this will", "if not dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\": laundry.id_to_location[hall_no], \"day_of_week\":", "datetime.date(year, month, day) start = now - datetime.timedelta(days=30) # get the current day", "and tomorrow # python dow is monday = 0, while sql dow is", "= (dow + 1) % 7 # some commands are different between mysql", "for x in room_ids.split(\",\")] account_id = g.account.id if g.account else None for room_id", ") total_washers = sum( [room[\"washers\"][x] for x in [\"open\", \"running\", \"offline\", \"out_of_order\"]] )", "\"start_date\": min(dates).strftime(\"%Y-%m-%d\"), \"end_date\": max(dates).strftime(\"%Y-%m-%d\"), \"total_number_of_dryers\": safe_division(sum(all_dryers), len(all_dryers)), \"total_number_of_washers\": safe_division(sum(all_washers), len(all_washers)), \"washer_data\": {x: safe_division(washer_points[x],", "int(x[\"all_washers\"]) dryer_points[hour] += int(x[\"all_dryers\"]) washer_total[hour] += 1 dryer_total[hour] += 1 dates = [x[\"date\"]", "in data] if not dates: dates = [now] return { \"hall_name\": laundry.id_to_hall[hall_no], \"location\":", "1) % 7 # some commands are different between mysql and sqlite is_mysql", "jsonify(to_ret) except ValueError: return jsonify({\"error\": \"Invalid hall id passed to server.\"}) except HTTPError:", "cached_route(\"laundry:usage:%s:%s-%s-%s\" % (hall_no, year, month, day), td, get_data) def save_data(): \"\"\"Retrieves current laundry", ") ) & (LaundrySnapshot.date >= start) ) ) .group_by(LaundrySnapshot.date, \"time\") .order_by(LaundrySnapshot.date, \"time\") .all()", "\"washer_data\": {x: safe_division(washer_points[x], washer_total[x]) for x in washer_points}, \"dryer_data\": {x: safe_division(dryer_points[x], dryer_total[x]) for", "& (LaundrySnapshot.time == time)) ).scalar(): return # make a dict for hall name", "1 dryer_total[hour] += 1 dates = [x[\"date\"] for x in data] if not", "= laundry.hall_status(hall) 
hall_data[\"id\"] = hall hall_data[\"usage_data\"] = usage_data(hall, date.year, date.month, date.day) output[\"rooms\"].append(hall_data) return" ]
[ "parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating", "\"\"\"Validates each properties defined in the yaml configuration file \"\"\" self.logger.info(\"Validating the configuration", "file_name, logger=None): self.logger = logger self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\")", "self.validate() # Converting datetime object to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config]", "if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the", "if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config. Errors: %s\" % (", "Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated the config file\") return validator.document", "configurations from %s file at line %s.\" % (file_name, mark.line), ) else: print_and_log(", "import print_and_log class Configuration: __instance = None def __new__(cls, *args, **kwargs): if not", "self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\",", "date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined", "self.logger, \"exception\", \"Error while reading the configurations from %s file at line %s.\"", "reading the configurations from %s file at line %s.\" % (file_name, mark.line), )", "yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger,", "Configuration.__instance: Configuration.__instance = object.__new__(cls) 
return Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger", "Validator from schema import schema from sharepoint_utils import print_and_log class Configuration: __instance =", "to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates", "schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config. Errors: %s\" %", "import Validator from schema import schema from sharepoint_utils import print_and_log class Configuration: __instance", "yaml configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema)", "file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError", "the configurations from %s file at line %s.\" % (file_name, mark.line), ) else:", "properties defined in the yaml configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator", "%s.\" % (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went wrong while", "encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'):", "schema import schema from sharepoint_utils import print_and_log class Configuration: __instance = None def", "class Configuration: __instance = None def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance", "= Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config.", "yaml from yaml.error import YAMLError from cerberus import Validator from schema import schema", "as exception: if hasattr(exception, 
'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while", "self.logger, \"exception\", \"Something went wrong while parsing yaml file %s. Error: %s\" %", "validating the config. Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated the config", "validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config. Errors: %s\" % ( validator.errors))", "\"exception\", \"Something went wrong while parsing yaml file %s. Error: %s\" % (file_name,", "self.configurations = self.validate() # Converting datetime object to string for date_config in [\"start_time\",", "Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger self.file_name = file_name try: with", "self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in the yaml configuration file \"\"\"", "*args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self, file_name,", "print_and_log(self.logger, \"error\", \"Error while validating the config. Errors: %s\" % ( validator.errors)) exit(0)", "the config. 
Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated the config file\")", "def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def", "mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the configurations from %s", "while reading the configurations from %s file at line %s.\" % (file_name, mark.line),", "(file_name, exception), ) self.configurations = self.validate() # Converting datetime object to string for", "YAMLError from cerberus import Validator from schema import schema from sharepoint_utils import print_and_log", "= file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except", "def validate(self): \"\"\"Validates each properties defined in the yaml configuration file \"\"\" self.logger.info(\"Validating", "validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the", "\"exception\", \"Error while reading the configurations from %s file at line %s.\" %", "\"Error while validating the config. Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated", "from yaml.error import YAMLError from cerberus import Validator from schema import schema from", "wrong while parsing yaml file %s. 
Error: %s\" % (file_name, exception), ) self.configurations", "sharepoint_utils import print_and_log class Configuration: __instance = None def __new__(cls, *args, **kwargs): if", "as stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark", "__instance = None def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls)", "mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went wrong while parsing yaml file", ") else: print_and_log( self.logger, \"exception\", \"Something went wrong while parsing yaml file %s.", "yaml.error import YAMLError from cerberus import Validator from schema import schema from sharepoint_utils", "= self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in the yaml configuration file", "from sharepoint_utils import print_and_log class Configuration: __instance = None def __new__(cls, *args, **kwargs):", "def __init__(self, file_name, logger=None): self.logger = logger self.file_name = file_name try: with open(file_name,", "logger self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations =", "**kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None):", "Error: %s\" % (file_name, exception), ) self.configurations = self.validate() # Converting datetime object", "\"error\", \"Error while validating the config. 
Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully", "<reponame>praveen-elastic/workplace-search-sharepoint16-connector import yaml from yaml.error import YAMLError from cerberus import Validator from schema", "Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger self.file_name", "return Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger self.file_name = file_name try:", "print_and_log class Configuration: __instance = None def __new__(cls, *args, **kwargs): if not Configuration.__instance:", "cerberus import Validator from schema import schema from sharepoint_utils import print_and_log class Configuration:", "yaml file %s. Error: %s\" % (file_name, exception), ) self.configurations = self.validate() #", "not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None): self.logger =", "Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config. 
Errors:", "%s file at line %s.\" % (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\",", "try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError as", "from %s file at line %s.\" % (file_name, mark.line), ) else: print_and_log( self.logger,", "Converting datetime object to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ')", "= self.validate() # Converting datetime object to string for date_config in [\"start_time\", \"end_time\"]:", "% (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went wrong while parsing", "exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the configurations from %s file at", "open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception: if", "if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None): self.logger", "\"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger,", "__new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance def __init__(self,", "validate(self): \"\"\"Validates each properties defined in the yaml configuration file \"\"\" self.logger.info(\"Validating the", "exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading", "from schema import schema from sharepoint_utils import print_and_log class Configuration: __instance = None", "from cerberus import Validator from schema import schema 
from sharepoint_utils import print_and_log class", "\"Something went wrong while parsing yaml file %s. Error: %s\" % (file_name, exception),", "%s. Error: %s\" % (file_name, exception), ) self.configurations = self.validate() # Converting datetime", "import YAMLError from cerberus import Validator from schema import schema from sharepoint_utils import", "None def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return Configuration.__instance", "__init__(self, file_name, logger=None): self.logger = logger self.file_name = file_name try: with open(file_name, \"r\",", "with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception:", "except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\",", "self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in the yaml configuration", "= logger self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations", "\"Error while reading the configurations from %s file at line %s.\" % (file_name,", "= exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the configurations from %s file", "validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while validating the config. Errors: %s\"", "while parsing yaml file %s. 
Error: %s\" % (file_name, exception), ) self.configurations =", "[\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in the", "file at line %s.\" % (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something", "defined in the yaml configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator =", "the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error", "(file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went wrong while parsing yaml", "object to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self):", "= yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log(", "exception), ) self.configurations = self.validate() # Converting datetime object to string for date_config", "in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in", "hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the configurations", ") self.configurations = self.validate() # Converting datetime object to string for date_config in", "'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error while reading the configurations from", "stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark =", "the yaml configuration file 
\"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations,", "schema from sharepoint_utils import print_and_log class Configuration: __instance = None def __new__(cls, *args,", "parsing yaml file %s. Error: %s\" % (file_name, exception), ) self.configurations = self.validate()", "%s\" % (file_name, exception), ) self.configurations = self.validate() # Converting datetime object to", "for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties", "configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if", "file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors:", "% (file_name, exception), ) self.configurations = self.validate() # Converting datetime object to string", "self.configurations = yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark", "else: print_and_log( self.logger, \"exception\", \"Something went wrong while parsing yaml file %s. Error:", "YAMLError as exception: if hasattr(exception, 'problem_mark'): mark = exception.problem_mark print_and_log( self.logger, \"exception\", \"Error", "went wrong while parsing yaml file %s. 
Error: %s\" % (file_name, exception), )", "string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each", "each properties defined in the yaml configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\")", "line %s.\" % (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went wrong", "\"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def validate(self): \"\"\"Validates each properties defined in the yaml", "import yaml from yaml.error import YAMLError from cerberus import Validator from schema import", "self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream)", "datetime object to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] = self.configurations[date_config].strftime('%Y-%m-%dT%H:%M:%SZ') def", "configuration parameters\") validator = Validator(schema) validator.validate(self.configurations, schema) if validator.errors: print_and_log(self.logger, \"error\", \"Error while", "= object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger self.file_name =", "config. 
Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated the config file\") return", "Configuration: __instance = None def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance =", "self.logger = logger self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as stream:", "print_and_log( self.logger, \"exception\", \"Error while reading the configurations from %s file at line", "= None def __new__(cls, *args, **kwargs): if not Configuration.__instance: Configuration.__instance = object.__new__(cls) return", "while validating the config. Errors: %s\" % ( validator.errors)) exit(0) self.logger.info(\"Successfully validated the", "at line %s.\" % (file_name, mark.line), ) else: print_and_log( self.logger, \"exception\", \"Something went", "logger=None): self.logger = logger self.file_name = file_name try: with open(file_name, \"r\", encoding=\"utf-8\") as", "print_and_log( self.logger, \"exception\", \"Something went wrong while parsing yaml file %s. Error: %s\"", "# Converting datetime object to string for date_config in [\"start_time\", \"end_time\"]: self.configurations[date_config] =", "import schema from sharepoint_utils import print_and_log class Configuration: __instance = None def __new__(cls,", "in the yaml configuration file \"\"\" self.logger.info(\"Validating the configuration parameters\") validator = Validator(schema)", "object.__new__(cls) return Configuration.__instance def __init__(self, file_name, logger=None): self.logger = logger self.file_name = file_name", "\"r\", encoding=\"utf-8\") as stream: self.configurations = yaml.safe_load(stream) except YAMLError as exception: if hasattr(exception,", "file %s. Error: %s\" % (file_name, exception), ) self.configurations = self.validate() # Converting" ]
[ "JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html')", "= 10.00 non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative", "JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html')", "JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template = JINJA_ENVIRONMENT.get_template('list.html') index_template = JINJA_ENVIRONMENT.get_template('index.html')", "veg_cost = 10.00 non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is", "autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html')", "= JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template =", "= JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template = JINJA_ENVIRONMENT.get_template('list.html') index_template =", "app root. 
loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template =", "to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template", "loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template", "JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'],", "root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html')", "relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html')", "directory is relative to app root. 
loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template", "= JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template =", "jinja2 veg_cost = 10.00 non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory", "extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template =", "JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template = JINJA_ENVIRONMENT.get_template('list.html')", "thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template", "10.00 non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative to", "= JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template =", "pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = 
JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template", "cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template = JINJA_ENVIRONMENT.get_template('stepbystep.html') email_template = JINJA_ENVIRONMENT.get_template('email_template.html') list_template = JINJA_ENVIRONMENT.get_template('list.html') index_template", "form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template = JINJA_ENVIRONMENT.get_template('paypal.html') thankyou_template = JINJA_ENVIRONMENT.get_template('thankyou.html') cancel_template = JINJA_ENVIRONMENT.get_template('cancel.html') step_by_step_template", "is relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html') pay_template =", "import jinja2 veg_cost = 10.00 non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates", "10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative to app root. loader=jinja2.FileSystemLoader('templates'),", "jinja2.Environment( # templates directory is relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template", "templates directory is relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template = JINJA_ENVIRONMENT.get_template('form.html')", "= jinja2.Environment( # templates directory is relative to app root. loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True)", "# templates directory is relative to app root. 
loader=jinja2.FileSystemLoader('templates'), extensions=['jinja2.ext.autoescape'], autoescape=True) form_template =", "non_veg_cost = 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative to app", "= 10.00 JINJA_ENVIRONMENT = jinja2.Environment( # templates directory is relative to app root." ]
[ "= 'http://' + url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/')", "'http': url = 'http://' + url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir", "'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview': 'Found {}", "'?' for key in query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] =", "_payload, 'check_str': check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1;", "reports, **kwargs): self.results = results self.reports = reports self.args = kwargs self.vulnerable =", "'+' + str(randint2) + '}}' check_str = str(_sum) return {'payload': _payload, 'check_str': check_str}", "32768) _sum = randint1 + randint2 _payload = '{{' + str(randint1) + '+'", "'version': '1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384,", "'name': 'Server-Side Template Injector for all', 'version': '1.0' } @staticmethod def set_payload(): randint1", "'check_str': check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64;", "split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc for i in", "{}'.format(url)) attack_url = [] if url[0:4] != 'http': url = 'http://' + url", "parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme +", "SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable)) }) logging.info(\"SSTI", "for i in range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i] =", "like Gecko) Chrome/56.0.' 
'2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing on", "len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url = _url", "= self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url = _url + '/'.join(split)", "list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url", "self.args = kwargs self.vulnerable = [] @staticmethod def meta(): return { 'name': 'Server-Side", "Template Injector for all', 'version': '1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768,", "self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header':", "payload['payload'] _query = [] for _key, _value in tmp.items(): _query += list(map(lambda x:", "+ str(randint2) + '}}' check_str = str(_sum) return {'payload': _payload, 'check_str': check_str} def", "check_str = str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self): headers = {", "__init__(self, results, reports, **kwargs): self.results = results self.reports = reports self.args = kwargs", "x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url in attack_url: req", "vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side", "_url = parse_result.scheme + '://' + parse_result.netloc for i in range(1, len(split_dir)): payload", "_sum = randint1 + randint2 _payload = '{{' + str(randint1) + '+' +", "reports self.args = kwargs self.vulnerable = [] @staticmethod def meta(): return { 'name':", "(KHTML, like Gecko) Chrome/56.0.' 
'2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing", "'http://' + url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url", "'&'.join(_query), 'payload': payload}) for test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str'])", "urllib import parse from copy import deepcopy import random import requests class SSTIDetector:", "_query = [] for _key, _value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key,", "payload = self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url = _url +", "= { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)", "from copy import deepcopy import random import requests class SSTIDetector: def __init__(self, results,", "[] @staticmethod def meta(): return { 'name': 'Server-Side Template Injector for all', 'version':", "url = 'http://' + url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir =", "check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)", "url[0:4] != 'http': url = 'http://' + url parse_result = parse.urlparse(url) query =", "query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc", "{'payload': _payload, 'check_str': check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT", "}) self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)),", "'{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url in attack_url:", "random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum = randint1 + randint2 _payload =", "str(randint1) + '+' 
+ str(randint2) + '}}' check_str = str(_sum) return {'payload': _payload,", "exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML,", "parse_result.scheme + '://' + parse_result.netloc for i in range(1, len(split_dir)): payload = self.set_payload()", "in range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url", "tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for _key, _value in", "attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url in attack_url: req = requests.get(test_url['url'],", "logging from urllib import parse from copy import deepcopy import random import requests", "point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable)) }) logging.info(\"SSTI scan", "coding:utf-8 import logging from urllib import parse from copy import deepcopy import random", "import deepcopy import random import requests class SSTIDetector: def __init__(self, results, reports, **kwargs):", "'Server Side Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'],", "+= parse_result.path + '?' 
for key in query.keys(): payload = self.set_payload() tmp =", "self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection Points',", "'2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url =", "import requests class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results = results self.reports", "self.results = results self.reports = reports self.args = kwargs self.vulnerable = [] @staticmethod", "deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for _key, _value in tmp.items(): _query", "= reports self.args = kwargs self.vulnerable = [] @staticmethod def meta(): return {", "+= list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for", "attack_url = [] if url[0:4] != 'http': url = 'http://' + url parse_result", "attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url:", "if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload':", "SSTIDetector: def __init__(self, results, reports, **kwargs): self.results = results self.reports = reports self.args", "= requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({", "payload}) for test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1:", "requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable 
url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url':", "_payload = '{{' + str(randint1) + '+' + str(randint2) + '}}' check_str =", "} for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if", "self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url':", "'payload': payload}) for test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) !=", "deepcopy import random import requests class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results", "_value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url in attack_url: req =", "on {}'.format(url)) attack_url = [] if url[0:4] != 'http': url = 'http://' +", "NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' '2924.87 Safari/537.36' } for", "req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url']))", "tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload':", "str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0", "'title': 'Server Side Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path',", "= parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc for", "url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if url[0:4] !=", "'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview':", "!= -1: 
logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] })", "check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path +", "= [] for _key, _value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x),", "parse from copy import deepcopy import random import requests class SSTIDetector: def __init__(self,", "class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results = results self.reports = reports", "+ '}}' check_str = str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self): headers", "+ randint2 _payload = '{{' + str(randint1) + '+' + str(randint2) + '}}'", "logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if url[0:4] != 'http': url =", "+ '://' + parse_result.netloc for i in range(1, len(split_dir)): payload = self.set_payload() split", "_url + '&'.join(_query), 'payload': payload}) for test_url in attack_url: req = requests.get(test_url['url'], headers=headers)", "test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview': 'Found {} SSTI", "_url += parse_result.path + '?' 
for key in query.keys(): payload = self.set_payload() tmp", "= parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://'", "results self.reports = reports self.args = kwargs self.vulnerable = [] @staticmethod def meta():", "meta(): return { 'name': 'Server-Side Template Injector for all', 'version': '1.0' } @staticmethod", "in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if url[0:4] != 'http':", "range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload'] check_url =", "**kwargs): self.results = results self.reports = reports self.args = kwargs self.vulnerable = []", "_query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload})", "randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum = randint1 + randint2", "return {'payload': _payload, 'check_str': check_str} def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows", "[] for _key, _value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value))", "-1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({", "= payload['payload'] _query = [] for _key, _value in tmp.items(): _query += list(map(lambda", "all', 'version': '1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2 =", "split = deepcopy(split_dir) split[i] = payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url,", "copy import deepcopy import random import requests class SSTIDetector: def __init__(self, results, reports,", "Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': 
list(map(lambda x: [x['url'],", "check_url, 'payload': payload}) _url += parse_result.path + '?' for key in query.keys(): payload", "Chrome/56.0.' '2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url", "logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title':", "} @staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum", "Gecko) Chrome/56.0.' '2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url))", "parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc for i", "'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' '2924.87", "# coding:utf-8 import logging from urllib import parse from copy import deepcopy import", "str(randint2) + '}}' check_str = str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self):", "Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = []", "'payload': payload}) _url += parse_result.path + '?' 
for key in query.keys(): payload =", "= [] @staticmethod def meta(): return { 'name': 'Server-Side Template Injector for all',", "for _key, _value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url':", "[] if url[0:4] != 'http': url = 'http://' + url parse_result = parse.urlparse(url)", "for all', 'version': '1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2", "requests class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results = results self.reports =", "import logging from urllib import parse from copy import deepcopy import random import", "{} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable)) })", "set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum = randint1 +", "def __init__(self, results, reports, **kwargs): self.results = results self.reports = reports self.args =", "def set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum = randint1", "= deepcopy(split_dir) split[i] = payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload':", "randint2 _payload = '{{' + str(randint1) + '+' + str(randint2) + '}}' check_str", "def exec(self): headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36", "kwargs self.vulnerable = [] @staticmethod def meta(): return { 'name': 'Server-Side Template Injector", "for test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI", "x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query), 'payload': payload}) for test_url in", "return { 'name': 'Server-Side Template Injector for all', 'version': '1.0' } @staticmethod def", "= randint1 
+ randint2 _payload = '{{' + str(randint1) + '+' + str(randint2)", "_key, _value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url", "Injector for all', 'version': '1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768, 65536)", "test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection Points', 'overview': 'Found", "+ url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url =", "Side Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries':", "+ '&'.join(_query), 'payload': payload}) for test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if", "self.reports = reports self.args = kwargs self.vulnerable = [] @staticmethod def meta(): return", "in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url + '&'.join(_query),", "detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server", "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' 
'2924.87 Safari/537.36' } for url in self.results['urls']: logging.critical('SSTI", "'1.0' } @staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768)", "import parse from copy import deepcopy import random import requests class SSTIDetector: def", "results, reports, **kwargs): self.results = results self.reports = reports self.args = kwargs self.vulnerable", "= random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum = randint1 + randint2 _payload", "req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload']", "headers = { 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like", "@staticmethod def set_payload(): randint1 = random.randint(32768, 65536) randint2 = random.randint(16384, 32768) _sum =", "parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme + '://' +", "def meta(): return { 'name': 'Server-Side Template Injector for all', 'version': '1.0' }", "in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable", "in query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query =", "+ '+' + str(randint2) + '}}' check_str = str(_sum) return {'payload': _payload, 'check_str':", "@staticmethod def meta(): return { 'name': 'Server-Side Template Injector for all', 'version': '1.0'", "for key in query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload']", "_value in tmp.items(): _query += list(map(lambda x: '{}={}'.format(_key, x), _value)) attack_url.append({'url': _url +", "random.randint(16384, 32768) _sum = randint1 + randint2 
_payload = '{{' + str(randint1) +", "parse_result.path + '?' for key in query.keys(): payload = self.set_payload() tmp = deepcopy(query)", "'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable)) }) logging.info(\"SSTI scan finished!\")", "'://' + parse_result.netloc for i in range(1, len(split_dir)): payload = self.set_payload() split =", "{}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template Injection", "= payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url +=", "= _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path + '?'", "+ '?' for key in query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0]", "attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path + '?' for key in query.keys():", "'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']], self.vulnerable))", "_url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path + '?' 
for", "= parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc for i in range(1,", "from urllib import parse from copy import deepcopy import random import requests class", "= str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self): headers = { 'User_Agent':", "payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path", "self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for _key, _value", "= self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for _key,", "+ str(randint1) + '+' + str(randint2) + '}}' check_str = str(_sum) return {'payload':", "randint1 + randint2 _payload = '{{' + str(randint1) + '+' + str(randint2) +", "= [] if url[0:4] != 'http': url = 'http://' + url parse_result =", "parse_result.path.split('/') _url = parse_result.scheme + '://' + parse_result.netloc for i in range(1, len(split_dir)):", "= '{{' + str(randint1) + '+' + str(randint2) + '}}' check_str = str(_sum)", "{ 'User_Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.'", "randint2 = random.randint(16384, 32768) _sum = randint1 + randint2 _payload = '{{' +", "payload}) _url += parse_result.path + '?' 
for key in query.keys(): payload = self.set_payload()", "query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query = []", "deepcopy(split_dir) split[i] = payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload})", "random import requests class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results = results", "self.vulnerable = [] @staticmethod def meta(): return { 'name': 'Server-Side Template Injector for", "split[i] = payload['payload'] check_url = _url + '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url", "Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x:", "import random import requests class SSTIDetector: def __init__(self, results, reports, **kwargs): self.results =", "url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query) split_dir = parse_result.path.split('/') _url = parse_result.scheme", "65536) randint2 = random.randint(16384, 32768) _sum = randint1 + randint2 _payload = '{{'", "'/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path + '?' 
for key in", "tmp[key][0] = payload['payload'] _query = [] for _key, _value in tmp.items(): _query +=", "= parse_result.scheme + '://' + parse_result.netloc for i in range(1, len(split_dir)): payload =", "testing on {}'.format(url)) attack_url = [] if url[0:4] != 'http': url = 'http://'", "headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected: vulnerable url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'],", "if url[0:4] != 'http': url = 'http://' + url parse_result = parse.urlparse(url) query", "= deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for _key, _value in tmp.items():", "= results self.reports = reports self.args = kwargs self.vulnerable = [] @staticmethod def", "i in range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i] = payload['payload']", "+ '/'.join(split) attack_url.append({'url': check_url, 'payload': payload}) _url += parse_result.path + '?' for key", "{ 'name': 'Server-Side Template Injector for all', 'version': '1.0' } @staticmethod def set_payload():", "'Server-Side Template Injector for all', 'version': '1.0' } @staticmethod def set_payload(): randint1 =", "6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' '2924.87 Safari/537.36' } for url", "x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' 
'2924.87 Safari/537.36' } for url in self.results['urls']:", "= random.randint(16384, 32768) _sum = randint1 + randint2 _payload = '{{' + str(randint1)", "self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if url[0:4] != 'http': url", "parse_result.netloc for i in range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir) split[i]", "'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda x: [x['url'], x['payload']],", "url: {}'.format(test_url['url'])) self.vulnerable.append({ 'url': test_url['url'], 'payload': test_url['payload']['payload'] }) self.reports.append({ 'title': 'Server Side Template", "(Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' '2924.87 Safari/537.36' }", "'}}' check_str = str(_sum) return {'payload': _payload, 'check_str': check_str} def exec(self): headers =", "test_url in attack_url: req = requests.get(test_url['url'], headers=headers) if req.text.find(test_url['payload']['check_str']) != -1: logging.critical('SSTI detected:", "payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query = [] for", "+ parse_result.netloc for i in range(1, len(split_dir)): payload = self.set_payload() split = deepcopy(split_dir)", "key in query.keys(): payload = self.set_payload() tmp = deepcopy(query) tmp[key][0] = payload['payload'] _query", "!= 'http': url = 'http://' + url parse_result = parse.urlparse(url) query = parse.parse_qs(parse_result.query)", "'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' '2924.87 Safari/537.36'", "'{{' + str(randint1) + '+' + str(randint2) + '}}' check_str = str(_sum) return", "Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.' 
'2924.87 Safari/537.36' } for url in", "for url in self.results['urls']: logging.critical('SSTI testing on {}'.format(url)) attack_url = [] if url[0:4]", "Template Injection Points', 'overview': 'Found {} SSTI point(s)'.format(len(self.vulnerable)), 'header': ['Path', 'Payload'], 'entries': list(map(lambda", "= kwargs self.vulnerable = [] @staticmethod def meta(): return { 'name': 'Server-Side Template" ]
[ "= 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1'", "caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data()", "try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage", "MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\",", "print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. Код ошибки: \" + str(e), caption=\"Error\",", "for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START +", "MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)):", "crutch = wx.Frame(None, -1, \"\") #TODO change dir to this script dir sys_tray", "TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ =", "class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i in", "yaml from lib import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025", "2000 __version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\",", "\"\") #TODO change dir to this script dir sys_tray = MyTaskBarIcon() icon =", "программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array", "= 
dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def", "onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId()", "menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app =", "for i in range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text =", "menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT)", "text = clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for", "\"Неудалось запустить программу. Код ошибки: \" + str(e), caption=\"Error\", style=wx.OK|wx.CENTRE|wx.ICON_ERROR, pos=wx.DefaultPosition) dialog.ShowModal() app.MainLoop()", "Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход',", "e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки: \" + str(e),", "dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self):", "encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception", "wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() -", "\"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout,", "script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data())", "= clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i", "i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i)", "self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch = wx.Frame(None, -1, \"\")", "menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU,", "= wx.Frame(None, -1, \"\") #TODO change dir to this script dir sys_tray =", "ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO", "encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i] in dictionary: text_array[i]", "wx import wx.adv 
import yaml from lib import clipboard TRAY_TOOLTIP = 'System Tray", "= \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu =", "wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change dir to this script dir", "in range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text)", "text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__()", "pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text)", "wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh:", "TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config =", "print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if", "from lib import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START", "dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary", "import wx import wx.adv import yaml from lib import clipboard TRAY_TOOLTIP = 'System", "print(text_array) for i in range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text", 
"'0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal()", "= wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete,", "import yaml from lib import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT =", "i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О", "CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\")", "list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i]", "TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]]", "if codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception as e: print(str(e)) dialog", "\"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text", "TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\",", "return menu app = wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change dir", "clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for", "in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class 
MyTaskBarIcon(wx.adv.TaskBarIcon): def", "super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i,", "= encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i] in dictionary:", "'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False)", "<filename>main.py<gh_stars>0 import wx import wx.adv import yaml from lib import clipboard TRAY_TOOLTIP =", "as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None", "\"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch", "'', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU,", "onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch = wx.Frame(None,", "except FileNotFoundError: None except Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить", "wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки: \" + str(e), caption=\"Error\", style=wx.OK|wx.CENTRE|wx.ICON_ERROR, pos=wx.DefaultPosition) dialog.ShowModal()", "-1, \"\") #TODO change dir to this script dir sys_tray = MyTaskBarIcon() icon", "print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i", "id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\")", "ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event): dialog =", "def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary =", "import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000", "= MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\",", "menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT)", "def CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"],", "Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки: \"", "1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал", "to this script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP,", "encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе',", "= 2000 __version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О", "self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch =", "clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__", "= wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as", "программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu", "menu app = wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change dir to", "FileNotFoundError: None except Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу.", "change dir to this script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon,", "this script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu)", "Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event): dialog", "open(\"./assets/sw_templates.yml\", \"r\", 
encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i]", "__init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START +", "- TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i] in dictionary: text_array[i] =", "i in range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array)", "= 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None,", "menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\")", "id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch = wx.Frame(None, -1,", "except Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки:", "\"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu()", "i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit,", "style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text = clipboard.get_clipboard_data() text_array =", "#sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh)", "wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START", "'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return", "+ i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU,", "#TODO change dir to this script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\"))", "if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class", "wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event):", "text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self): super().__init__() def CreatePopupMenu(self): menu", "import wx.adv import yaml from lib import 
clipboard TRAY_TOOLTIP = 'System Tray Demo'", "wx.adv import yaml from lib import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT", "Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array)", "icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\")", "range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '',", "\"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\")", "= wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки: \" + str(e), caption=\"Error\", style=wx.OK|wx.CENTRE|wx.ICON_ERROR, pos=wx.DefaultPosition)", "ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event):", "text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)):", "self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") menu.Append(ID_MENU_ABOUT, 'О программе', \"\") menu.Append(wx.ID_EXIT,", "def Translete(event): text = clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"]", "text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon):", "with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in", "\"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except", "in encoding_translate_config[i] except FileNotFoundError: None except Exception as e: print(str(e)) dialog = wx.MessageDialog(None,", "\"\") menu.Append(wx.ID_EXIT, 'Выход', \"\") self.Bind(wx.EVT_MENU, onExit, id=wx.ID_EXIT) self.Bind(wx.EVT_MENU, ShowAbout, id=ID_MENU_ABOUT) return menu app", "sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with", "+ i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR, '', \"\") 
menu.Append(ID_MENU_ABOUT,", "def __init__(self): super().__init__() def CreatePopupMenu(self): menu = wx.Menu() for i in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START", "sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config", "= '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition)", "dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit()", "#TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception as e: print(str(e))", "Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def ShowAbout(event):", "codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception as e: print(str(e)) dialog =", "dialog = wx.MessageDialog(None, \"Неудалось запустить программу. Код ошибки: \" + str(e), caption=\"Error\", style=wx.OK|wx.CENTRE|wx.ICON_ERROR,", "encoding_translate_config[i] except FileNotFoundError: None except Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось", "None except Exception as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. Код", "as e: print(str(e)) dialog = wx.MessageDialog(None, \"Неудалось запустить программу. 
Код ошибки: \" +", "= wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def", "clipboard.get_clipboard_data() text_array = list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in", "range(len(text_array)): if text_array[i] in dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text)", "id=ID_MENU_ABOUT) return menu app = wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change", "fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None except", "showInfoMenu) print(clipboard.get_clipboard_data()) try: with open(\"./assets/sw_templates.yml\", \"r\", encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO", "<NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def onExit(event): wx.Exit() def Translete(event): text =", "lib import clipboard TRAY_TOOLTIP = 'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START =", "def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE, pos=wx.DefaultPosition) dialog.ShowModal() def", "__version__ = '0.0.1' def ShowAbout(event): dialog = wx.MessageDialog(None, \"Создал <NAME>\", caption=\"О программе\", style=wx.OK|wx.CENTRE,", "= yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception as", "app = wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change dir to this", "dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP) #sys_tray.Bind(wx.adv.EVT_TASKBAR_RIGHT_UP, showInfoMenu) print(clipboard.get_clipboard_data()) try:", "dictionary 
= encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if text_array[i] in", "= wx.App(False) crutch = wx.Frame(None, -1, \"\") #TODO change dir to this script", "wx.Frame(None, -1, \"\") #TODO change dir to this script dir sys_tray = MyTaskBarIcon()", "dictionary: text_array[i] = dictionary[text_array[i]] text = \"\".join(text_array) clipboard.set_clipboard_data(text) print(text) class MyTaskBarIcon(wx.adv.TaskBarIcon): def __init__(self):", "= list(text) dictionary = encoding_translate_config[event.GetId() - TRANSLATE_EVENTS_START][\"dictionary\"] print(text_array) for i in range(len(text_array)): if", "dir to this script dir sys_tray = MyTaskBarIcon() icon = wx.Icon(wx.Bitmap(\"./assets/icon.png\")) sys_tray.SetIcon(icon, TRAY_TOOLTIP)", "in range(len(encoding_translate_config)): menu.Append(TRANSLATE_EVENTS_START + i, encoding_translate_config[i][\"title\"], \"\") self.Bind(wx.EVT_MENU, Translete, id=TRANSLATE_EVENTS_START + i) menu.Append(wx.ID_SEPARATOR,", "yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError: None except Exception as e:", "encoding=\"utf8\") as fh: encoding_translate_config = yaml.safe_load(fh) #TODO if codepage in encoding_translate_config[i] except FileNotFoundError:", "'System Tray Demo' ID_MENU_ABOUT = 1025 TRANSLATE_EVENTS_START = 2000 __version__ = '0.0.1' def" ]
[ "have a JIRA # ticket to track the future change. import argparse import", "# Ensuring all TODOs are either complete when opening the PR, or have", "tag with --regex\") # Figure out what regex to use tag = args.tag", "Ensuring all TODOs are either complete when opening the PR, or have a", "followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f", "help='Files to search') args = parser.parse_args() # Do not allow specifying a tag", "to search') args = parser.parse_args() # Do not allow specifying a tag and", "form: # # TODO(DEV-1234): some text # # Ensuring all TODOs are either", "opening the PR, or have a JIRA # ticket to track the future", "allow specifying a tag and --regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot", "args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure out", "= re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in args.files: for i,", "parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to search for\", default=DEFAULT_TAG, dest='tag')", "regex to use tag = args.tag + \"-[0-9]+\" if args.regex: tag = args.regex", "#!/usr/bin/env python3 -s # Searches passed files for TODO comments. # # Prints", "argparse import re import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag',", "type=str, help=\"A JIRA-like tag (i.e. 
DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex',", "if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure", "TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args = parser.parse_args() #", "This regex matches all TODO comments (prefixed by // or #) that are", "exits non-zero) if any TODO comments are not in the form: # #", "for i, line in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s %s' %", "f in args.files: for i, line in enumerate(open(f)): for match in re.finditer(pattern, line):", "specifying a tag and --regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide", "to use tag = args.tag + \"-[0-9]+\" if args.regex: tag = args.regex #", "all TODO comments (prefixed by // or #) that are not # immediately", "are not # immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret", "files for TODO comments. # # Prints out (and exits non-zero) if any", "args.files: for i, line in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s %s'", "TODO comments (prefixed by // or #) that are not # immediately followed", "track the future change. 
import argparse import re import sys DEFAULT_TAG = \"DEV\"", "// or #) that are not # immediately followed by \"($TAG)\" pattern =", "inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args =", "in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1, match.string.strip())) ret += 1 exit(ret)", "metavar='FILES', type=str, nargs='+', help='Files to search') args = parser.parse_args() # Do not allow", "# Figure out what regex to use tag = args.tag + \"-[0-9]+\" if", "args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure out what regex", "'--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r',", "sys.exit(\"cannot provide tag with --regex\") # Figure out what regex to use tag", "if any TODO comments are not in the form: # # TODO(DEV-1234): some", "JIRA # ticket to track the future change. import argparse import re import", "type=str, nargs='+', help='Files to search') args = parser.parse_args() # Do not allow specifying", "match in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1, match.string.strip())) ret += 1", "and --regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\")", "default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match inner in TODO(inner)\",", "the PR, or have a JIRA # ticket to track the future change.", "Do not allow specifying a tag and --regex if args.regex and args.tag !=", "for match in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1, match.string.strip())) ret +=", "or have a JIRA # ticket to track the future change. import argparse", "-s # Searches passed files for TODO comments. 
# # Prints out (and", "# This regex matches all TODO comments (prefixed by // or #) that", "are not in the form: # # TODO(DEV-1234): some text # # Ensuring", "provide tag with --regex\") # Figure out what regex to use tag =", "help=\"Specify the regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+',", "tag = args.regex # This regex matches all TODO comments (prefixed by //", "all TODOs are either complete when opening the PR, or have a JIRA", "tag (i.e. DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the", "<filename>todo-tags.py #!/usr/bin/env python3 -s # Searches passed files for TODO comments. # #", "match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args", "pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in args.files: for", "immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for", "are either complete when opening the PR, or have a JIRA # ticket", "text # # Ensuring all TODOs are either complete when opening the PR,", "in the form: # # TODO(DEV-1234): some text # # Ensuring all TODOs", "re import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A", "matches all TODO comments (prefixed by // or #) that are not #", "TODO(DEV-1234): some text # # Ensuring all TODOs are either complete when opening", "the regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files", "= args.regex # This regex matches all TODO comments (prefixed by // or", "when opening the PR, or have a JIRA # ticket to track the", "either complete when opening the PR, or have a JIRA # ticket to", "enumerate(open(f)): 
for match in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1, match.string.strip())) ret", "# ticket to track the future change. import argparse import re import sys", "dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match inner in TODO(inner)\", dest='regex')", "parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args = parser.parse_args() # Do not", "sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag", "TODOs are either complete when opening the PR, or have a JIRA #", "in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1, match.string.strip()))", "args.regex # This regex matches all TODO comments (prefixed by // or #)", "change. import argparse import re import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser()", "any TODO comments are not in the form: # # TODO(DEV-1234): some text", "--regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") #", "args.tag + \"-[0-9]+\" if args.regex: tag = args.regex # This regex matches all", "DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to", "(prefixed by // or #) that are not # immediately followed by \"($TAG)\"", "TODO comments are not in the form: # # TODO(DEV-1234): some text #", "ticket to track the future change. import argparse import re import sys DEFAULT_TAG", "# TODO(DEV-1234): some text # # Ensuring all TODOs are either complete when", "#) that are not # immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\",", "PR, or have a JIRA # ticket to track the future change. 
import", "+ \"-[0-9]+\" if args.regex: tag = args.regex # This regex matches all TODO", "complete when opening the PR, or have a JIRA # ticket to track", "to track the future change. import argparse import re import sys DEFAULT_TAG =", "import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like", "argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to search for\", default=DEFAULT_TAG,", "regex matches all TODO comments (prefixed by // or #) that are not", "TODO comments. # # Prints out (and exits non-zero) if any TODO comments", "import re import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str,", "and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure out what", "# Searches passed files for TODO comments. # # Prints out (and exits", "the form: # # TODO(DEV-1234): some text # # Ensuring all TODOs are", "regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to", "not in the form: # # TODO(DEV-1234): some text # # Ensuring all", "!= DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure out what regex to", "by // or #) that are not # immediately followed by \"($TAG)\" pattern", "nargs='+', help='Files to search') args = parser.parse_args() # Do not allow specifying a", "\"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to", "tag = args.tag + \"-[0-9]+\" if args.regex: tag = args.regex # This regex", "parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files',", "i, line in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s %s' % (f,", "(i.e. 
DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex", "the future change. import argparse import re import sys DEFAULT_TAG = \"DEV\" parser", "Figure out what regex to use tag = args.tag + \"-[0-9]+\" if args.regex:", "Prints out (and exits non-zero) if any TODO comments are not in the", "= parser.parse_args() # Do not allow specifying a tag and --regex if args.regex", "type=str, help=\"Specify the regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str,", "in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args = parser.parse_args()", "future change. import argparse import re import sys DEFAULT_TAG = \"DEV\" parser =", "= \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV)", "parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to search", "args = parser.parse_args() # Do not allow specifying a tag and --regex if", "# Do not allow specifying a tag and --regex if args.regex and args.tag", "for TODO comments. # # Prints out (and exits non-zero) if any TODO", "not allow specifying a tag and --regex if args.regex and args.tag != DEFAULT_TAG:", "# Prints out (and exits non-zero) if any TODO comments are not in", "non-zero) if any TODO comments are not in the form: # # TODO(DEV-1234):", "(and exits non-zero) if any TODO comments are not in the form: #", "re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in args.files: for i, line", "search') args = parser.parse_args() # Do not allow specifying a tag and --regex", "JIRA-like tag (i.e. 
DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify", "import argparse import re import sys DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t',", "ret = 0 for f in args.files: for i, line in enumerate(open(f)): for", "what regex to use tag = args.tag + \"-[0-9]+\" if args.regex: tag =", "# # TODO(DEV-1234): some text # # Ensuring all TODOs are either complete", "DEFAULT_TAG: sys.exit(\"cannot provide tag with --regex\") # Figure out what regex to use", "tag and --regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag with", "dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search') args = parser.parse_args() # Do", "# # Ensuring all TODOs are either complete when opening the PR, or", "to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match", "not # immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret =", "# # Prints out (and exits non-zero) if any TODO comments are not", "for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match inner in", "\"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in args.files:", "by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in", "Searches passed files for TODO comments. 
# # Prints out (and exits non-zero)", "comments are not in the form: # # TODO(DEV-1234): some text # #", "or #) that are not # immediately followed by \"($TAG)\" pattern = re.compile(", "# immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0", "line in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s %s' % (f, i+1,", "= argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e. DEV) to search for\",", "that are not # immediately followed by \"($TAG)\" pattern = re.compile( r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE)", "parser.parse_args() # Do not allow specifying a tag and --regex if args.regex and", "\"-[0-9]+\" if args.regex: tag = args.regex # This regex matches all TODO comments", "args.regex: tag = args.regex # This regex matches all TODO comments (prefixed by", "= 0 for f in args.files: for i, line in enumerate(open(f)): for match", "if args.regex: tag = args.regex # This regex matches all TODO comments (prefixed", "comments. # # Prints out (and exits non-zero) if any TODO comments are", "'--regex', type=str, help=\"Specify the regex to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES',", "in args.files: for i, line in enumerate(open(f)): for match in re.finditer(pattern, line): print('%s:%s", "r\"(\\/\\/|#)+\\s?TODO((?!\\(\"+tag+r\"\\)).)*$\", re.IGNORECASE) ret = 0 for f in args.files: for i, line in", "help=\"A JIRA-like tag (i.e. 
DEV) to search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str,", "= args.tag + \"-[0-9]+\" if args.regex: tag = args.regex # This regex matches", "0 for f in args.files: for i, line in enumerate(open(f)): for match in", "to match inner in TODO(inner)\", dest='regex') parser.add_argument('files', metavar='FILES', type=str, nargs='+', help='Files to search')", "--regex\") # Figure out what regex to use tag = args.tag + \"-[0-9]+\"", "out what regex to use tag = args.tag + \"-[0-9]+\" if args.regex: tag", "for f in args.files: for i, line in enumerate(open(f)): for match in re.finditer(pattern,", "a tag and --regex if args.regex and args.tag != DEFAULT_TAG: sys.exit(\"cannot provide tag", "use tag = args.tag + \"-[0-9]+\" if args.regex: tag = args.regex # This", "comments (prefixed by // or #) that are not # immediately followed by", "DEFAULT_TAG = \"DEV\" parser = argparse.ArgumentParser() parser.add_argument('-t', '--tag', type=str, help=\"A JIRA-like tag (i.e.", "re.IGNORECASE) ret = 0 for f in args.files: for i, line in enumerate(open(f)):", "out (and exits non-zero) if any TODO comments are not in the form:", "a JIRA # ticket to track the future change. import argparse import re", "with --regex\") # Figure out what regex to use tag = args.tag +", "search for\", default=DEFAULT_TAG, dest='tag') parser.add_argument('-r', '--regex', type=str, help=\"Specify the regex to match inner", "some text # # Ensuring all TODOs are either complete when opening the", "python3 -s # Searches passed files for TODO comments. # # Prints out", "passed files for TODO comments. # # Prints out (and exits non-zero) if" ]
[ "############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP,", "in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer in self.layers: x", "0.5 * (1 + leak) f2 = 0.5 * (1 - leak) return", "nn.ModuleList() in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size", "size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer in self.layers: x = self.activation(layer(x))", "self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5 * (1 + leak)", "############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__()", "super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation self.layers = nn.ModuleList() in_ =", "(1 + leak) f2 = 0.5 * (1 - leak) return f1 *", "leak) f2 = 0.5 * (1 - leak) return f1 * x +", "for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self,", "= input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size))", "else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5 * (1", "leak=0.2): f1 = 0.5 * (1 + leak) f2 = 0.5 * (1", "import os ############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh,", "def __init__(self, 
input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation", "import torch from torch import nn import os ############################################ ############################################ class MLP(nn.Module): def", "i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x):", "= 0.5 * (1 - leak) return f1 * x + f2 *", "= nn.ModuleList() in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ =", "x): for layer in self.layers: x = self.activation(layer(x)) if not self.output_activation: return x", "MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation =", "input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def", "in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for", "############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5 * (1 + leak) f2", "x = self.activation(layer(x)) if not self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################", "return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5 * (1 +", "forward(self, x): for layer in self.layers: x = self.activation(layer(x)) if not self.output_activation: return", "range(n_layers): self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer", "self.layers.append(nn.Linear(size, 
output_size)) def forward(self, x): for layer in self.layers: x = self.activation(layer(x)) if", "n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation self.layers", "os ############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None):", "output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation", "* (1 + leak) f2 = 0.5 * (1 - leak) return f1", "output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation self.layers = nn.ModuleList() in_", "= 0.5 * (1 + leak) f2 = 0.5 * (1 - leak)", "f2 = 0.5 * (1 - leak) return f1 * x + f2", "f1 = 0.5 * (1 + leak) f2 = 0.5 * (1 -", "in self.layers: x = self.activation(layer(x)) if not self.output_activation: return x else: return self.output_activation(x)", "layer in self.layers: x = self.activation(layer(x)) if not self.output_activation: return x else: return", "torch from torch import nn import os ############################################ ############################################ class MLP(nn.Module): def __init__(self,", "not self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1", "= size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer in self.layers: x =", "lrelu(x, leak=0.2): f1 = 0.5 * (1 + leak) f2 = 0.5 *", "= output_activation self.layers = nn.ModuleList() in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_,", "in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, 
size)) in_ = size self.layers.append(nn.Linear(size,", "############################################ def lrelu(x, leak=0.2): f1 = 0.5 * (1 + leak) f2 =", "self.layers = nn.ModuleList() in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size)) in_", "if not self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2):", "return x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5", "class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation", "self).__init__() self.activation = activation self.output_activation = output_activation self.layers = nn.ModuleList() in_ = input_size", "= activation self.output_activation = output_activation self.layers = nn.ModuleList() in_ = input_size for i", "size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer in self.layers:", "input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation =", "nn import os ############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers, size,", "self.output_activation = output_activation self.layers = nn.ModuleList() in_ = input_size for i in range(n_layers):", "self.activation = activation self.output_activation = output_activation self.layers = nn.ModuleList() in_ = input_size for", "self.layers.append(nn.Linear(in_, size)) in_ = size self.layers.append(nn.Linear(size, output_size)) def forward(self, x): for layer in", "self.layers: x = self.activation(layer(x)) if not 
self.output_activation: return x else: return self.output_activation(x) ############################################", "activation self.output_activation = output_activation self.layers = nn.ModuleList() in_ = input_size for i in", "import nn import os ############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size, n_layers,", "size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation self.layers =", "for layer in self.layers: x = self.activation(layer(x)) if not self.output_activation: return x else:", "self.activation(layer(x)) if not self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x,", "0.5 * (1 - leak) return f1 * x + f2 * abs(x)", "__init__(self, input_size, output_size, n_layers, size, activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation", "= self.activation(layer(x)) if not self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################ def", "self.output_activation: return x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 =", "def lrelu(x, leak=0.2): f1 = 0.5 * (1 + leak) f2 = 0.5", "activation=torch.tanh, output_activation=None): super(MLP, self).__init__() self.activation = activation self.output_activation = output_activation self.layers = nn.ModuleList()", "+ leak) f2 = 0.5 * (1 - leak) return f1 * x", "from torch import nn import os ############################################ ############################################ class MLP(nn.Module): def __init__(self, 
input_size,", "output_size)) def forward(self, x): for layer in self.layers: x = self.activation(layer(x)) if not", "torch import nn import os ############################################ ############################################ class MLP(nn.Module): def __init__(self, input_size, output_size,", "output_activation self.layers = nn.ModuleList() in_ = input_size for i in range(n_layers): self.layers.append(nn.Linear(in_, size))", "x else: return self.output_activation(x) ############################################ ############################################ def lrelu(x, leak=0.2): f1 = 0.5 *", "def forward(self, x): for layer in self.layers: x = self.activation(layer(x)) if not self.output_activation:" ]
[ "def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element ''' if not driver: driver", "element ''' if not driver: driver = self._driver print(f\"locator : {locator}\") if not", "EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target}", "self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element ''' if not driver:", "def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not", "print(f\"locator : {locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target", "wait, found element {target} \") ## module level driver instance driver = BasePage._driver", "BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url)", ": {locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target =", "return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element ''' if not", "open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None):", "not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom", "= BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url 
BasePage._driver.get(self.page_url) def open_page(self,url=None):", "print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url)", "BrowserManager import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC", "<reponame>sohailchd/RobotAndLocust from utilities.BrowserManager import BrowserManager import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support", ") else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \")", "print(f\"exlicit wait, found element {target} \") ## module level driver instance driver =", "url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait", "BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if", "BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url", "if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): '''", "driver = self._driver print(f\"locator : {locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator)", ") print(f\"exlicit wait, found element {target} \") ## module level driver instance driver", "driver: driver = self._driver print(f\"locator : {locator}\") if not is_visible: target = WebDriverWait(driver,time).until(", "not is_visible: target = 
WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) )", "wait for given element ''' if not driver: driver = self._driver print(f\"locator :", "import BrowserManager import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as", "get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element ''' if", "{locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until(", "conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC class BasePage():", "called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url)", "from selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url):", "if not driver: driver = self._driver print(f\"locator : {locator}\") if not is_visible: target", "WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \") ## module level driver", "= url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self):", "EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \") ## module level driver instance", "init called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else:", "else: self._driver.get(url) def get_page_url(self): return 
self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given", "given element ''' if not driver: driver = self._driver print(f\"locator : {locator}\") if", "import WebDriverWait from selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser()", "''' if not driver: driver = self._driver print(f\"locator : {locator}\") if not is_visible:", "target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \") ## module", "if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator)", "for given element ''' if not driver: driver = self._driver print(f\"locator : {locator}\")", "custom wait for given element ''' if not driver: driver = self._driver print(f\"locator", "self._driver print(f\"locator : {locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else:", "def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element '''", "_driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url) def", "EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url =", "WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element", "def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def", "import 
expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init", "as EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url", "selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page", "selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver =", "url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return", "target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait,", "self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url: self._driver.get(self.page_url) else: self._driver.get(url) def", "WebDriverWait from selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser() def", "= self._driver print(f\"locator : {locator}\") if not is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) )", "from utilities.BrowserManager import BrowserManager import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import", "class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url", "self._driver.get(self.page_url) else: self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for", 
"__init__(self,url=conf.base_url): print(\"base_page init called...\") self.page_url = url BasePage._driver.get(self.page_url) def open_page(self,url=None): if not url:", "import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC class", "= WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found", "= WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \") ## module level", "is_visible: target = WebDriverWait(driver,time).until( EC.presence_of_element_located(locator) ) else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit", "from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC class BasePage(): _driver", "explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element ''' if not driver: driver =", "else: target = WebDriverWait(driver,time).until( EC.visibility_of_element_located(locator) ) print(f\"exlicit wait, found element {target} \") ##", "''' custom wait for given element ''' if not driver: driver = self._driver", "expected_conditions as EC class BasePage(): _driver = BrowserManager.get_browser() def __init__(self,url=conf.base_url): print(\"base_page init called...\")", "utilities.BrowserManager import BrowserManager import conf from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions", "not driver: driver = self._driver print(f\"locator : {locator}\") if not is_visible: target =", "self._driver.get(url) def get_page_url(self): return self.page_url def explicit_wait(self,locator,time=20,is_visible=False,driver=None): ''' custom wait for given element" ]
[ "None: query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise", "Unless required by applicable law or agreed to in writing, software # distributed", "__init__(self, retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration =", "{addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError as e:", "-> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database def", "TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is already committed') if self._state is", "None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop", "self._state = TransactionState.STARTED async def commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot", "{opname}; the transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state", "self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self):", "= TransactionState.COMMITTED async def rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually", "credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca,", "str = None, user: str = None, password: str = None, database: str", "transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED:", "qc=query_context.cache.query_cache, 
io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall( self,", "{self._id};' else: query = 'ROLLBACK;' return query async def start(self) -> None: query", "= False self._next_backoff = 0 self._options = connection._options def _retry(self, exc): self._last_exception =", "All name resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError", "connection self._iteration = 0 self._done = False self._next_backoff = 0 self._options = connection._options", "connection): self._connection = connection self._iteration = 0 self._done = False self._next_backoff = 0", "abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ =", "self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners:", "`async with transaction:`\" ) if not self.__started: self.__started = True if self._connection.is_closed(): await", "-> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _", "for it? 
if ( extype is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY)", "COMMITTED = 2 ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER =", "the transaction is in error state') def __check_state(self, opname): if self._state is not", "__aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already in an `async with`", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "result, _ = await self.raw_query(query_context) return result async def execute(self, query: str) ->", "commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually commit from within an", "_ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are supported. \"", "_ = await self._connection.raw_query(query_context) return result async def execute(self, query: str) -> None:", "errors.InterfaceError( f'cannot {opname}; the transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start')", "errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise con_utils.wrap_error(e) from e except Exception:", "= start + client_config.wait_until_available iteration = 1 while True: addr = self._params.address try:", "return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext):", "= self._test_no_tls con._params = self._params return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache", "implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str, *args,", "dsn: str = None, host: str = None, port: int = None, credentials:", "self._protocol = None self._query_cache = 
abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params", "loop ) try: if isinstance(addr, str): # UNIX socket tr, pr = await", "if single_attempt: max_time = 0 else: max_time = start + client_config.wait_until_available iteration =", "source file is part of the EdgeDB open source project. # # Copyright", "None: tr.close() raise self._protocol = pr def retrying_transaction(self) -> Retry: return Retry(self) def", "self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED async def", "= None, credentials_file: str = None, user: str = None, password: str =", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "status = self._protocol.last_status if status is not None: status = status.decode() return status", "`async with` block') await self._rollback() async def _rollback(self) -> None: query = self._make_rollback_query()", "able to receive a response raise err # If we were going to", "tr, pr = await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr =", "= f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return query def _make_commit_query(self):", "-> Connection: return await Connection( { \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\":", "io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self)", "the transaction is already rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot", "type: ignore ) async def ensure_connected(self): if self.is_closed(): await self.connect() return self async", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by 
applicable law", "_ = await self.raw_query(query_context) return result async def execute(self, query: str) -> None:", "= None, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args,", "True await self.start() return self async def __aexit__(self, extype, ex, tb): try: if", "self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol", "f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return query def _make_commit_query(self): query", ") async def _fetchall( self, query: str, *args, __limit__: int = 0, __typeids__:", "_ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY,", "self._next_backoff = 0 self._options = connection._options def _retry(self, exc): self._last_exception = exc rule", "if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return", "== 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True),", "self._nested = True if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query =", "type: ignore ) async def _fetchall( self, query: str, *args, __limit__: int =", "have received it or after it have been done but # network is", "tls_security: str = None, test_no_tls: bool = False, wait_until_available: int = 30, timeout:", "ensure_connected(self): if self.is_closed(): await self.connect() return self async def raw_query(self, query_context: abstract.QueryContext): return", "self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, 
qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async", "str = None, tls_security: str = None, test_no_tls: bool = False, wait_until_available: int", "password: str = None, database: str = None, tls_ca: str = None, tls_ca_file:", "task. # NOTE: rollback error is always swallowed, should we use # on_log_message", "True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as", "specific language governing permissions and # limitations under the License. # \"\"\"A specialized", "f'ROLLBACK TO SAVEPOINT {self._id};' return query async def __aenter__(self): if self._managed: raise errors.InterfaceError(", "a response raise err # If we were going to rollback, look at", "e except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e", "if not self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0", "if self._managed: raise errors.InterfaceError( 'cannot enter context: already in an `async with` block')", "await self._connection.raw_query(query_context) return result async def execute(self, query: str) -> None: await self._ensure_transaction()", "= False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await", "asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self): tr = None loop =", "_ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type:", "try: await pr.connect() except OSError as e: if tr is not None: tr.close()", "if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return query async def 
__aenter__(self):", "self._managed = False self._nested = False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def", "def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result", "# on_log_message for it? if ( extype is not None and issubclass(extype, errors.EdgeDBError)", "is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise", "extype is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def", "self._done = False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return self async", "30, timeout: int = 10, ) -> Connection: return await Connection( { \"dsn\":", "BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def rollback(self) ->", "max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from", "= retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started = False async def", "await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS,", "= 0 self._options = connection._options def _retry(self, exc): self._last_exception = exc rule =", "self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query", "= TransactionState.FAILED raise else: self._state = TransactionState.STARTED async def commit(self) -> None: if", "self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise 
else: self._state =", "not None: await self._rollback() else: await self._commit() finally: self._managed = False class Iteration(BaseTransaction,", "self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return con def", "self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE,", "def is_active(self) -> bool: return self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state", "None = None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol", "to retry, regardless of # the rollback error. # In this case we", "type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is", "await self.raw_query(query_context) return result async def execute(self, query: str) -> None: await self.ensure_connected()", "e except Exception: if tr is not None: tr.close() raise pr.set_connection(self) try: await", "retry self.__iteration = iteration self.__started = False async def __aenter__(self): if self._managed: raise", ") if not self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration !=", "async def execute(self, query: str) -> None: await self._ensure_transaction() await self._connection.execute(query) async def", "nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err", "return self._params.database def connected_addr(self): return self._params.address async def aclose(self): if not self.is_closed(): try:", "def ensure_connected(self): if self.is_closed(): await 
self.connect() return self async def raw_query(self, query_context: abstract.QueryContext):", "raise errors.InterfaceError( 'cannot manually commit from within an `async with` block') await self._commit()", "already rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction", "kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def", "not use this file except in compliance with the License. # You may", "not None: tr.close() raise pr.set_connection(self) try: await pr.connect() except OSError as e: if", "swallowed, should we use # on_log_message for it? if ( extype is not", "self, query: str, *args, __limit__: int = 0, __typeids__: bool = False, __typenames__:", "query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query", "sec\" ) from e except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or", "return self async def __anext__(self): # Note: when changing this code consider also", "SAVEPOINT {self._id};' return query async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter", ") await self.start() class Retry: def __init__(self, connection): self._connection = connection self._iteration =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise", "callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb in", "None: tr.close() raise 
pr.set_connection(self) try: await pr.connect() except OSError as e: if tr", "False self._done = False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return self", "**self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if single_attempt: max_time = 0 else:", "self._rollback() else: await self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self,", "that API (for example, transactions can be nested and are non-retrying). \"\"\" from", "int = 0, __typeids__: bool = False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int]", "is not None: await self._rollback() else: await self._commit() finally: self._managed = False class", "host: str = None, port: int = None, credentials: str = None, credentials_file:", "for EdgeDB tests. Historically EdgeDB tests relied on a very specific client API", "agreed to in writing, software # distributed under the License is distributed on", "if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class Retry: def", "0 self._done = False self._next_backoff = 0 self._options = connection._options def _retry(self, exc):", "rule.backoff(self._iteration) return True def __aiter__(self): return self async def __anext__(self): # Note: when", "import typing import abc import asyncio import enum import functools import random import", "= con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if single_attempt: max_time =", "raise pr.set_connection(self) try: await pr.connect() except OSError as e: if tr is not", "f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is", "-> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return 
self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]:", "await self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection,", "is in error state') def __check_state(self, opname): if self._state is not TransactionState.STARTED: if", "else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact", "_make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact =", "SAVEPOINT {self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self:", "protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except", "await loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx", "= set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg):", "**kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache,", "connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic()", "_make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context:", "is_active(self) -> bool: 
return self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state is", "return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) ->", "__init__(self, connection): self._connection = connection self._iteration = 0 self._done = False self._next_backoff =", "edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self): if self.is_closed(): await self.connect() return", "# This source file is part of the EdgeDB open source project. #", "under the License. # \"\"\"A specialized client API for EdgeDB tests. Historically EdgeDB", "to in writing, software # distributed under the License is distributed on an", "implied. # See the License for the specific language governing permissions and #", "None: # On commit we don't know if commit is succeeded before the", "elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr) else: try: tr, pr =", "connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started", "__allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args,", "except socket.gaierror as e: # All name resolution errors are considered temporary raise", "self._commit() async def _commit(self) -> None: query = self._make_commit_query() try: await self._connection.execute(query) except", "whether we want to retry, regardless of # the rollback error. # In", "to cancel the whole task. 
# NOTE: rollback error is always swallowed, should", "we were going to rollback, look at original error # to find out", "@property def dbname(self) -> str: return self._params.database def connected_addr(self): return self._params.address async def", "'START TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self:", "or not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None,", "is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is not yet started') self.__check_state_base(opname)", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "con._test_no_tls = self._test_no_tls con._params = self._params return con def _get_query_cache(self) -> abstract.QueryCache: return", "import asyncio import enum import functools import random import socket import ssl import", "await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e) from", "iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started =", "query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def", "= connection._options def _retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration", "except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def", "as original error is more # 
important, e.g. in case `CancelledError` it's important", "API that is no longer supported by edgedb-python. Here we implement that API", "e: raise con_utils.wrap_error(e) from e except Exception: if tr is not None: tr.close()", "self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort() async", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return", "= await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore", "# All name resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except", "__typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result,", "self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration):", "server_settings=None, ) start = time.monotonic() if single_attempt: max_time = 0 else: max_time =", "self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls", "block') self._managed = True await self.start() return self async def __aexit__(self, extype, ex,", "con._top_xact is None: con._top_xact = self else: # Nested transaction block self._nested =", "TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def rollback(self) -> None: if self._managed:", "with` block') 
await self._rollback() async def _rollback(self) -> None: query = self._make_rollback_query() try:", "this case we ignore rollback issue as original error is more # important,", "self._test_no_tls con._params = self._params return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async", "return iteration += 1 await asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self):", "return result async def execute(self, query: str) -> None: await self.ensure_connected() await self._protocol.simple_query(", "ssl import time from edgedb import abstract from edgedb import errors from edgedb", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def", "async def __anext__(self): # Note: when changing this code consider also # updating", "False try: if extype is None: await self._commit() else: await self._rollback() except errors.EdgeDBError", "async def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result, _ = await", "as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr =", "is more # important, e.g. 
in case `CancelledError` it's important # to propagate", "going to rollback, look at original error # to find out whether we", ">= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\" )", "client_config.wait_until_available iteration = 1 while True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(),", "errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from e except", "query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result async def", "connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache = abstract.QueryCache(", ") return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is", "not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None,", "def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous(", "within an `async with` block') await self._rollback() async def _rollback(self) -> None: query", "start(self) -> None: query = self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state =", "asyncio import enum import functools import random import socket import ssl import time", "propagate it to cancel the whole task. 
# NOTE: rollback error is always", "True def __aiter__(self): return self async def __anext__(self): # Note: when changing this", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "original error is more # important, e.g. in case `CancelledError` it's important #", "= None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query =", "= super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query =", "1 and time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration,", "query = f'ROLLBACK TO SAVEPOINT {self._id};' return query async def __aenter__(self): if self._managed:", "loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e:", "# NOTE: rollback error is always swallowed, should we use # on_log_message for", "committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is already", "_rollback(self) -> None: query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state =", "pr.set_connection(self) try: await pr.connect() except OSError as e: if tr is not None:", "This source file is part of the EdgeDB open source project. 
# #", "return Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def", "TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is not", "e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >= max_time)", "execute(self, query: str) -> None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if", "self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None", "abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def __init__(self, connect_args, *, test_no_tls=False): super().__init__()", "random.random() * 0.2) async def connect_addr(self): tr = None loop = asyncio.get_running_loop() addr", "self._params return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self, query_context:", "try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK", "TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query async def start(self) ->", "for cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args", "started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def", "with` block') await self._commit() async def _commit(self) -> None: query = self._make_commit_query() try:", "retriable transactions are supported. 
\" \"Use `async with transaction:`\" ) if not self.__started:", "self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start()", "NEW = 0 STARTED = 1 COMMITTED = 2 ROLLEDBACK = 3 FAILED", "started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start;", "dropped before we were able to receive a response raise err # If", "not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are supported. \" \"Use `async", "* 0.2) async def connect_addr(self): tr = None loop = asyncio.get_running_loop() addr =", "e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >= max_time) ): nice_err = e.__class__(", "query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result", "the License. # \"\"\"A specialized client API for EdgeDB tests. Historically EdgeDB tests", "self._managed: raise errors.InterfaceError( 'cannot manually rollback from within an `async with` block') await", "self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are supported. \" \"Use `async with", "self._nested = False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool:", "before we were able to receive a response raise err # If we", "self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query", "# limitations under the License. 
# \"\"\"A specialized client API for EdgeDB tests.", "required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall( self, query: str, *args,", "from e except BaseException: if tr is not None: tr.close() raise self._protocol =", "self._test_no_tls = test_no_tls self._params = None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback)", "StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection, self._iteration)", "= TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def rollback(self) -> None: if", "if iteration > 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr}", "self._connection = connection self._iteration = 0 self._done = False self._next_backoff = 0 self._options", "`CancelledError` it's important # to propagate it to cancel the whole task. #", "we implement that API (for example, transactions can be nested and are non-retrying).", "retrying_transaction(self) -> Retry: return Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self):", "*, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(),", "to find out whether we want to retry, regardless of # the rollback", "self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT", "from e except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from", "client API that is no longer supported by edgedb-python. 
Here we implement that", "start + client_config.wait_until_available iteration = 1 while True: addr = self._params.address try: await", "import time from edgedb import abstract from edgedb import errors from edgedb import", "try: tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as", "Here we implement that API (for example, transactions can be nested and are", "self._params.address async def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception,", "typing import abc import asyncio import enum import functools import random import socket", "transaction is in error state') def __check_state(self, opname): if self._state is not TransactionState.STARTED:", "await self._commit() async def _commit(self) -> None: query = self._make_commit_query() try: await self._connection.execute(query)", "-> str: return self._params.database def connected_addr(self): return self._params.address async def aclose(self): if not", "10, ) -> Connection: return await Connection( { \"dsn\": dsn, \"host\": host, \"port\":", "try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED", "transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... 
def _make_commit_query(self): self.__check_state('commit')", "is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is in error state') def", "network is dropped before we were able to receive a response raise err", "if not self.__started: return False try: if extype is None: await self._commit() else:", "_query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result async", "= self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return con", "None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None", "socket import ssl import time from edgedb import abstract from edgedb import errors", "def _commit(self) -> None: query = self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state", "tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All name resolution errors are considered", "if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is in error", "= None, test_no_tls: bool = False, wait_until_available: int = 30, timeout: int =", "tests relied on a very specific client API that is no longer supported", "edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED =", "query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def", "self._connection.raw_query(query_context) return result async def execute(self, query: str) -> None: await self._ensure_transaction() await", "it to cancel the whole task. 
# NOTE: rollback error is always swallowed,", "raise errors.InterfaceError( \"Only managed retriable transactions are supported. \" \"Use `async with transaction:`\"", "async def rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback from", "str, *args, __limit__: int = 0, __typeids__: bool = False, __typenames__: bool =", "None, tls_ca: str = None, tls_ca_file: str = None, tls_security: str = None,", "= test_no_tls self._params = None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def", "host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database,", "from edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol import protocol # type:", "None if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query", "con._params = self._params return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def", "inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str, *args, __limit__: int", "back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is in", "is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self):", "from edgedb import options from edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol", "\"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security, \"wait_until_available\": wait_until_available, }, test_no_tls=test_no_tls, ).ensure_connected()", "it? 
if ( extype is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ):", "errors from edgedb import con_utils from edgedb import enums as edgedb_enums from edgedb", "self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self): if", "client API for EdgeDB tests. Historically EdgeDB tests relied on a very specific", "ignore from edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum): NEW = 0", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return self async def __anext__(self):", "functools import random import socket import ssl import time from edgedb import abstract", "max_time = start + client_config.wait_until_available iteration = 1 while True: addr = self._params.address", "retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration = iteration", "self.connect() return self async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args,", "None, credentials_file: str = None, user: str = None, password: str = None,", "else: # Nested transaction block self._nested = True if self._nested: query = f'DECLARE", "import random import socket import ssl import time from edgedb import abstract from", "True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class Retry:", "__limit__: int = 0, __typeids__: bool = False, __typenames__: bool = False, __allow_capabilities__:", "TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = 
self._connection", "@abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if", "iteration self.__started = False async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter", "the transaction is already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname};", "try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not", "= 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection = owner", "self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await", "= 2 ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0", "language governing permissions and # limitations under the License. 
# \"\"\"A specialized client", "asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1 return", "_get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return None status = self._protocol.last_status if", "See the License for the specific language governing permissions and # limitations under", "to rollback, look at original error # to find out whether we want", "nice_err from e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01 + random.random() *", "_make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is", ") async def ensure_connected(self): if self.is_closed(): await self.connect() return self async def raw_query(self,", "TO SAVEPOINT {self._id};' return query async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "return self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database def connected_addr(self): return self._params.address", "f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query async def start(self)", "we want to retry, regardless of # the rollback error. # In this", "as e: raise con_utils.wrap_error(e) from e except Exception: if tr is not None:", "is no longer supported by edgedb-python. 
Here we implement that API (for example,", "owner): self._connection = owner self._state = TransactionState.NEW self._managed = False self._nested = False", "already in an `async with` block') self._managed = True await self.start() return self", "return None status = self._protocol.last_status if status is not None: status = status.decode()", "from within an `async with` block') await self._rollback() async def _rollback(self) -> None:", "def is_closed(self): return self._protocol is None or not self._protocol.connected async def connect(self, single_attempt=False):", "functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str): # UNIX socket tr,", "= connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls =", "int = 30, timeout: int = 10, ) -> Connection: return await Connection(", "specialized client API for EdgeDB tests. 
Historically EdgeDB tests relied on a very", "FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection =", "{self._id};' else: query = 'START TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query()", "is dropped before we were able to receive a response raise err #", "try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED", "query = self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else:", "return self._protocol is None or not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config", "str): # UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory, addr ) elif", "None, user: str = None, password: str = None, database: str = None,", "**kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__,", "RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property", "async def async_connect_test_client( dsn: str = None, host: str = None, port: int", "if commit is succeeded before the # database have received it or after", ") start = time.monotonic() if single_attempt: max_time = 0 else: max_time = start", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= None, tls_ca_file: str = None, tls_security: str = None, test_no_tls: bool =", "__typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return", "failed in\" f\" {client_config.connect_timeout} 
sec\" ) from e except errors.ClientConnectionError as e: if", "-> None: query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED", "= self._protocol.last_status if status is not None: status = status.decode() return status def", "except Exception: if tr is not None: tr.close() raise pr.set_connection(self) try: await pr.connect()", "await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, )", "TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is not yet started') self.__check_state_base(opname) def", "tr is not None: tr.close() raise self._protocol = pr def retrying_transaction(self) -> Retry:", "= 'ROLLBACK;' return query async def start(self) -> None: query = self._make_start_query() try:", "query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query", "1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\"", "if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >= max_time) ):", "if tr is not None: tr.close() raise pr.set_connection(self) try: await pr.connect() except OSError", "timeout: int = 10, ) -> Connection: return await Connection( { \"dsn\": dsn,", "raise else: self._state = TransactionState.STARTED async def commit(self) -> None: if self._managed: raise", "err # If we were going to rollback, look at original error #", "(for example, transactions can be nested and are non-retrying). \"\"\" from __future__ import", "errors.InterfaceError( \"Only managed retriable transactions are supported. 
\" \"Use `async with transaction:`\" )", "is None: return None status = self._protocol.last_status if status is not None: status", "tls_ca: str = None, tls_ca_file: str = None, tls_security: str = None, test_no_tls:", "e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01 + random.random() * 0.2) async", "block') self._managed = True return self async def __aexit__(self, extype, ex, tb): self._managed", "tls_ca_file: str = None, tls_security: str = None, test_no_tls: bool = False, wait_until_available:", "*args, __limit__: int = 0, **kwargs, ): await self.ensure_connected() result, _ = await", "the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self):", "query = 'ROLLBACK;' return query async def start(self) -> None: query = self._make_start_query()", "async def _commit(self) -> None: query = self._make_commit_query() try: await self._connection.execute(query) except BaseException:", "self._params = None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback):", "asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str): # UNIX socket tr, pr", "_make_start_query_inner(self): con = self._connection if con._top_xact is None: con._top_xact = self else: #", "implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self, query:", "return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await", "enter context: already in an `async with` block') self._managed = True return self", "raise errors.InterfaceError( 'cannot enter context: already in an `async with` block') self._managed =", "TransactionState.FAILED 
raise else: self._state = TransactionState.STARTED async def commit(self) -> None: if self._managed:", "self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state =", "self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction):", "project. # # Copyright 2016-present MagicStack Inc. and the EdgeDB authors. # #", "are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise con_utils.wrap_error(e)", "return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self):", "= f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query async def", "protocol # type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED", "self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return", "edgedb import options from edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol import", "KIND, either express or implied. # See the License for the specific language", "self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return None status =", "if self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname};", "in case `CancelledError` it's important # to propagate it to cancel the whole", "( extype is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex)", "file is part of the EdgeDB open source project. 
# # Copyright 2016-present", "int = None, credentials: str = None, credentials_file: str = None, user: str", "_get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result,", "if self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg) def", "ANY KIND, either express or implied. # See the License for the specific", "self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed", "args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self,", "raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise con_utils.wrap_error(e) from e except", "self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class Retry: def __init__(self,", "= self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state", "self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return query async def __aenter__(self): if", "context: already in an `async with` block') self._managed = True return self async", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done = False", "= 1 COMMITTED = 2 ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC):", "retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started = False async def __aenter__(self):", "\"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\":", "= 0, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args,", "error is always swallowed, should we use # on_log_message for it? if (", "an `async with` block') self._managed = True return self async def __aexit__(self, extype,", "query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None self._log_listeners = set() def add_log_listener(self,", "dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password,", "use # on_log_message for it? 
if ( extype is not None and issubclass(extype,", "str, *args, **kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs,", "- start, )) raise nice_err from e.__cause__ else: return iteration += 1 await", ") async def _fetchall_json( self, query: str, *args, __limit__: int = 0, **kwargs,", "= self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls =", "tr, pr = await loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await loop.create_connection(", "def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None if", "edgedb import abstract from edgedb import errors from edgedb import con_utils from edgedb", "manually commit from within an `async with` block') await self._commit() async def _commit(self)", "is not None: tr.close() raise self._protocol = pr def retrying_transaction(self) -> Retry: return", "if ( extype is not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return", "f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is TransactionState.STARTED def __check_state_base(self, opname): if", "as err: if ex is None: # On commit we don't know if", "reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall(", "def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction()", "self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): 
def __init__(self, retry, connection, iteration): super().__init__(connection) self._options", "f'cannot {opname}; the transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if", "e except OSError as e: raise con_utils.wrap_error(e) from e except Exception: if tr", "errors.InterfaceError( 'cannot manually rollback from within an `async with` block') await self._rollback() async", "if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is already rolled", "def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start =", "errors.InterfaceError( f'cannot {opname}; the transaction is already committed') if self._state is TransactionState.ROLLEDBACK: raise", "raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one,", "addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr,", "return self async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs,", "async def _fetchall( self, query: str, *args, __limit__: int = 0, __typeids__: bool", "async def __aexit__(self, extype, ex, tb): self._managed = False if not self.__started: return", "rollback error. 
# In this case we ignore rollback issue as original error", "await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async", "None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query()", "self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore", "def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate()", ") else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All name resolution", "are supported. 
\" \"Use `async with transaction:`\" ) if not self.__started: self.__started =", "__limit__: int = 0, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous(", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "type: ignore from edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum): NEW =", "def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str: return", "pr.connect() except OSError as e: if tr is not None: tr.close() raise con_utils.wrap_error(e)", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def", "connected_addr(self): return self._params.address async def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect()", "database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security, \"wait_until_available\": wait_until_available, }, test_no_tls=test_no_tls,", "{client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT)", "applicable law or agreed to in writing, software # distributed under the License", "0 else: max_time = start + client_config.wait_until_available iteration = 1 while True: addr", "opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is already", "e: raise con_utils.wrap_error(e) from e except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED':", "self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state is 
TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot", "on_log_message for it? if ( extype is not None and issubclass(extype, errors.EdgeDBError) and", "con._top_xact = self else: # Nested transaction block self._nested = True if self._nested:", "ID_COUNTER = 0 def __init__(self, owner): self._connection = owner self._state = TransactionState.NEW self._managed", "self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is not yet started')", "= retry self.__iteration = iteration self.__started = False async def __aenter__(self): if self._managed:", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", ") except ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except ssl.SSLError as e:", "an `async with` block') self._managed = True await self.start() return self async def", "async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return", ">= rule.attempts: return False self._done = False self._next_backoff = rule.backoff(self._iteration) return True def", "updating Retry.__next__. if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True", "to {addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError as", "return query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact =", "self: self._connection._top_xact = None if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query", "transactions can be nested and are non-retrying). 
\"\"\" from __future__ import annotations import", "(iteration > 1 and time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e,", "self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database def connected_addr(self): return self._params.address async", "no longer supported by edgedb-python. Here we implement that API (for example, transactions", "except errors.EdgeDBError as err: if ex is None: # On commit we don't", "ignore ) async def _fetchall( self, query: str, *args, __limit__: int = 0,", "writing, software # distributed under the License is distributed on an \"AS IS\"", "= rule.backoff(self._iteration) return True def __aiter__(self): return self async def __anext__(self): # Note:", "self async def __aexit__(self, extype, ex, tb): self._managed = False if not self.__started:", "loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr) else:", "more # important, e.g. in case `CancelledError` it's important # to propagate it", "2016-present MagicStack Inc. and the EdgeDB authors. # # Licensed under the Apache", "block') await self._commit() async def _commit(self) -> None: query = self._make_commit_query() try: await", "= await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror", "self._protocol.abort() async def async_connect_test_client( dsn: str = None, host: str = None, port:", "compliance with the License. 
# You may obtain a copy of the License", "async def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError):", "self async def __aexit__(self, extype, ex, tb): try: if extype is not None:", "async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already in an", "self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback')", "from edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED", "= None, credentials: str = None, credentials_file: str = None, user: str =", "rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is", "cancel the whole task. # NOTE: rollback error is always swallowed, should we", "error # to find out whether we want to retry, regardless of #", "= False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous(", "if status is not None: status = status.decode() return status def is_closed(self): return", "+ client_config.wait_until_available iteration = 1 while True: addr = self._params.address try: await asyncio.wait_for(", "return self._params.address async def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except", "await self.ensure_connected() result, _ = await self.raw_query(query_context) return result async def execute(self, query:", "self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str): #", "# If we were going to rollback, look at original error # to", "async def ensure_connected(self): 
if self.is_closed(): await self.connect() return self async def raw_query(self, query_context:", "abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry", "tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') )", "not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >= max_time) ): nice_err =", "con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls", "): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache,", "def _rollback(self) -> None: query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state", "self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the", "def dbname(self) -> str: return self._params.database def connected_addr(self): return self._params.address async def aclose(self):", "raise con_utils.wrap_error(e) from e except BaseException: if tr is not None: tr.close() raise", "None, tls_security: str = None, test_no_tls: bool = False, wait_until_available: int = 30,", "credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file,", "def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) ->", "self async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( 
query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry,", "return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings()", "0.2) async def connect_addr(self): tr = None loop = asyncio.get_running_loop() addr = self._params.address", "execute(self, query: str) -> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type:", "self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'RELEASE SAVEPOINT {self._id};'", "(Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client(", "edgedb import errors from edgedb import con_utils from edgedb import enums as edgedb_enums", "ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr", "io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected()", "if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query =", "try: if extype is not None: await self._rollback() else: await self._commit() finally: self._managed", "import abstract from edgedb import errors from edgedb import con_utils from edgedb import", "addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e:", "= await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e)", "expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, 
allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall( self, query: str,", "Retry: return Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction()", "self._options = retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started = False async", "been done but # network is dropped before we were able to receive", "(the \"License\"); # you may not use this file except in compliance with", "def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def", "if self._managed: raise errors.InterfaceError( 'cannot manually rollback from within an `async with` block')", "await self._rollback() else: await self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def", "except BaseException: if tr is not None: tr.close() raise self._protocol = pr def", "# Unless required by applicable law or agreed to in writing, software #", "in error state') def __check_state(self, opname): if self._state is not TransactionState.STARTED: if self._state", "bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return await", "by applicable law or agreed to in writing, software # distributed under the", "-> None: query = self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED", "return self async def __aexit__(self, extype, ex, tb): self._managed = False if not", "if not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are supported. 
\" \"Use", "= status.decode() return status def is_closed(self): return self._protocol is None or not self._protocol.connected", ") elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr) else: try: tr, pr", "-> None: if self._managed: raise errors.InterfaceError( 'cannot manually commit from within an `async", "await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if iteration > 1", "e: if iteration > 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to", "owner self._state = TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER += 1", "file except in compliance with the License. # You may obtain a copy", "always swallowed, should we use # on_log_message for it? if ( extype is", "the transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is", "False self._nested = False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) ->", "tb): self._managed = False if not self.__started: return False try: if extype is", "In this case we ignore rollback issue as original error is more #", "self._protocol is None: return None status = self._protocol.last_status if status is not None:", "None, port: int = None, credentials: str = None, credentials_file: str = None,", "= asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con =", "if self.is_closed(): await self.connect() return self async def raw_query(self, query_context: abstract.QueryContext): return await", "it's important # to propagate it to cancel the whole task. 
# NOTE:", "type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED = 2", "TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod", "import asyncio_proto # type: ignore from edgedb.protocol import protocol # type: ignore class", "not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self):", "self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is already started') return", "context: already in an `async with` block') self._managed = True await self.start() return", "\"Only managed retriable transactions are supported. \" \"Use `async with transaction:`\" ) if", "1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def", "raise errors.InterfaceError( f'cannot {opname}; the transaction is already committed') if self._state is TransactionState.ROLLEDBACK:", "def execute(self, query: str) -> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL #", "1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is TransactionState.STARTED def", "edgedb import enums as edgedb_enums from edgedb import options from edgedb.protocol import asyncio_proto", "Connection( { \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\":", "BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self):", "client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if 
single_attempt: max_time", "edgedb-python. Here we implement that API (for example, transactions can be nested and", "await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed retriable", "None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError(", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from e.__cause__", "transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str,", "RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact is None: con._top_xact = self", "finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection)", "addr ) elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr) else: try: tr,", "self.__started = False async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context:", "we don't know if commit is succeeded before the # database have received", "self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self,", "asyncio_proto # type: ignore from edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum):", "# type: ignore ) async def ensure_connected(self): if self.is_closed(): await self.connect() return self", "# On commit we don't know if commit is succeeded 
before the #", "self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection, self._iteration) self._iteration +=", "typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs,", "raise nice_err from e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01 + random.random()", "def terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str = None,", "EdgeDB authors. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "start = time.monotonic() if single_attempt: max_time = 0 else: max_time = start +", "source project. # # Copyright 2016-present MagicStack Inc. and the EdgeDB authors. #", "con_utils from edgedb import enums as edgedb_enums from edgedb import options from edgedb.protocol", "time.monotonic() if single_attempt: max_time = 0 else: max_time = start + client_config.wait_until_available iteration", "temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise con_utils.wrap_error(e) from e", "= await loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await loop.create_connection( protocol_factory, *addr,", "reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache()", "TransactionState.COMMITTED async def rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback", "from within an `async with` block') await self._commit() async def _commit(self) -> None:", "= None, database: str = None, tls_ca: str = None, tls_ca_file: str =", "def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return 
None status = self._protocol.last_status", "{ \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user,", "error. # In this case we ignore rollback issue as original error is", "self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None", "): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache", "rule.attempts: return False self._done = False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self):", "self._managed = True return self async def __aexit__(self, extype, ex, tb): self._managed =", "await self._rollback() except errors.EdgeDBError as err: if ex is None: # On commit", "tr.close() raise con_utils.wrap_error(e) from e except BaseException: if tr is not None: tr.close()", "= super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query =", "already in an `async with` block') self._managed = True return self async def", "credentials_file: str = None, user: str = None, password: str = None, database:", "managed retriable transactions are supported. \" \"Use `async with transaction:`\" ) if not", "return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None if", "can be nested and are non-retrying). 
\"\"\" from __future__ import annotations import typing", "query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context) return result async def", "# Nested transaction block self._nested = True if self._nested: query = f'DECLARE SAVEPOINT", "super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration = iteration self.__started = False", "str = None, password: str = None, database: str = None, tls_ca: str", "if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is already committed')", "errors.InterfaceError( f'cannot {opname}; the transaction is already rolled back') if self._state is TransactionState.FAILED:", "_make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is", "TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}'", "resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e:", "query: str, *args, __limit__: int = 0, **kwargs, ): await self.ensure_connected() result, _", "-> abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _", "self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result async def execute(self, query: str)", ") from e except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration", "None, tls_ca_file: str = None, tls_security: str = None, test_no_tls: bool = False,", "with transaction:`\" ) if not self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect(", "return 
query async def start(self) -> None: query = self._make_start_query() try: await self._connection.execute(query)", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "self.__retry = retry self.__iteration = iteration self.__started = False async def __aenter__(self): if", "return status def is_closed(self): return self._protocol is None or not self._protocol.connected async def", "test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(),", "3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection", "async def _fetchall_with_headers( self, query: str, *args, __limit__: int = 0, __typeids__: bool", "io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str, *args, __limit__: int =", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self,", "relied on a very specific client API that is no longer supported by", "Copyright 2016-present MagicStack Inc. and the EdgeDB authors. 
# # Licensed under the", "con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from e.__cause__ else:", "except OSError as e: if tr is not None: tr.close() raise con_utils.wrap_error(e) from", "errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) ->", "except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort() async def", "return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None:", "\"\"\"A specialized client API for EdgeDB tests. Historically EdgeDB tests relied on a", "= True if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query = 'START", "an `async with` block') await self._commit() async def _commit(self) -> None: query =", "= False, wait_until_available: int = 30, timeout: int = 10, ) -> Connection:", "duration=time.monotonic() - start, )) raise nice_err from e.__cause__ else: return iteration += 1", "None, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs,", "str = None, tls_ca: str = None, tls_ca_file: str = None, tls_security: str", "abstract from edgedb import errors from edgedb import con_utils from edgedb import enums", "user: str = None, password: str = None, database: str = None, tls_ca:", "async def execute(self, query: str) -> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL", "port: int = None, credentials: str = None, credentials_file: str = None, user:", "from __future__ import annotations import typing import abc import asyncio import enum import", "0 self._options = connection._options 
def _retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc)", "{opname}; the transaction is already rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError(", "= 0 self._done = False self._next_backoff = 0 self._options = connection._options def _retry(self,", "con_utils.wrap_error(e) from e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else:", "# to propagate it to cancel the whole task. # NOTE: rollback error", "self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is in error state')", "the License for the specific language governing permissions and # limitations under the", "Exception: if tr is not None: tr.close() raise pr.set_connection(self) try: await pr.connect() except", "self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return", "def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache", "= TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact is None:", "tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All", "query, edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self): if self.is_closed(): await self.connect()", "if tr is not None: tr.close() raise con_utils.wrap_error(e) from e except BaseException: if", "as e: if iteration > 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting", "implement that API (for example, transactions can be nested and 
are non-retrying). \"\"\"", "as e: if tr is not None: tr.close() raise con_utils.wrap_error(e) from e except", "import protocol # type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED = 1", "= 0 def __init__(self, owner): self._connection = owner self._state = TransactionState.NEW self._managed =", "args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "in\" f\" {client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError as e: if (", "the specific language governing permissions and # limitations under the License. # \"\"\"A", "e.g. in case `CancelledError` it's important # to propagate it to cancel the", "have been done but # network is dropped before we were able to", "nested and are non-retrying). 
\"\"\" from __future__ import annotations import typing import abc", "class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def __init__(self, connect_args, *,", ") try: if isinstance(addr, str): # UNIX socket tr, pr = await loop.create_unix_connection(", "1 while True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except", "tb): try: if extype is not None: await self._rollback() else: await self._commit() finally:", "*args, **kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry,", "= self else: # Nested transaction block self._nested = True if self._nested: query", "with` block') self._managed = True return self async def __aexit__(self, extype, ex, tb):", "typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database def connected_addr(self): return", "None: con._top_xact = self else: # Nested transaction block self._nested = True if", "= functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str): # UNIX socket", "__typeids__: bool = False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs,", "def commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually commit from within", "tr = None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol,", "= None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params,", "transaction is already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the", "*addr, ) else: con_utils.check_alpn_protocol( 
tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All name", "def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact", "await self.connect() return self async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query,", "await self.start() return self async def __aexit__(self, extype, ex, tb): try: if extype", "status.decode() return status def is_closed(self): return self._protocol is None or not self._protocol.connected async", "Version 2.0 (the \"License\"); # you may not use this file except in", "is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is already rolled back') if", "str = None, port: int = None, credentials: str = None, credentials_file: str", "= abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None self._log_listeners =", "be nested and are non-retrying). 
\"\"\" from __future__ import annotations import typing import", "ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except ssl.SSLError as e: if e.reason", "else: query = 'START TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query() if", "def connected_addr(self): return self._params.address async def aclose(self): if not self.is_closed(): try: self._protocol.terminate() await", "not self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 )", "class TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED = 2 ROLLEDBACK =", "query = self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else:", "limitations under the License. # \"\"\"A specialized client API for EdgeDB tests. Historically", "if self._managed: raise errors.InterfaceError( 'cannot manually commit from within an `async with` block')", "edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol import protocol # type: ignore", "_on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self,", "query: str, *args, __limit__: int = 0, __typeids__: bool = False, __typenames__: bool", "know if commit is succeeded before the # database have received it or", "def rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback from within", "super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK", "start, )) raise nice_err from e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01", "port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout,", "self._state = 
TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER += 1 self._id", "get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database", "from e except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration >", "None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def", "if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str = None, host: str", "addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from e.__cause__ else: return iteration", "find out whether we want to retry, regardless of # the rollback error.", "class Retry: def __init__(self, connection): self._connection = connection self._iteration = 0 self._done =", "kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self,", "def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for", "self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str:", "TransactionState.STARTED def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the", "import ssl import time from edgedb import abstract from edgedb import errors from", "ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> 
typing.Optional[str]: if self._protocol", "None status = self._protocol.last_status if status is not None: status = status.decode() return", "we ignore rollback issue as original error is more # important, e.g. in", "+= 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None", "should we use # on_log_message for it? if ( extype is not None", "are non-retrying). \"\"\" from __future__ import annotations import typing import abc import asyncio", "add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop", "= None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol =", "errors.EdgeDBError as err: if ex is None: # On commit we don't know", "else: self._state = TransactionState.COMMITTED async def rollback(self) -> None: if self._managed: raise errors.InterfaceError(", "TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is already rolled back') if self._state", "from edgedb import enums as edgedb_enums from edgedb import options from edgedb.protocol import", "in an `async with` block') self._managed = True await self.start() return self async", "_fetchall_with_headers( self, query: str, *args, __limit__: int = 0, __typeids__: bool = False,", "by edgedb-python. Here we implement that API (for example, transactions can be nested", "return False self._done = False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return", "out whether we want to retry, regardless of # the rollback error. 
#", "receive a response raise err # If we were going to rollback, look", "commit from within an `async with` block') await self._commit() async def _commit(self) ->", "con = self._connection if con._top_xact is None: con._top_xact = self else: # Nested", "= TransactionState.STARTED async def commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually", "typing.Optional[str]: if self._protocol is None: return None status = self._protocol.last_status if status is", "): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__,", "part of the EdgeDB open source project. # # Copyright 2016-present MagicStack Inc.", "test_no_tls self._params = None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self,", "Nested transaction block self._nested = True if self._nested: query = f'DECLARE SAVEPOINT {self._id};'", "tr is not None: tr.close() raise con_utils.wrap_error(e) from e except BaseException: if tr", "return self async def __aexit__(self, extype, ex, tb): try: if extype is not", "self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK", ") except socket.gaierror as e: # All name resolution errors are considered temporary", "self._state = TransactionState.COMMITTED async def rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot", "SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return query def _make_commit_query(self): query =", "extype, ex, tb): try: if extype is not None: await self._rollback() else: await", "and time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic()", 
"abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context) return result async def execute(self,", "errors.InterfaceError( 'cannot start; the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self):", "_make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested:", "def __anext__(self): # Note: when changing this code consider also # updating Retry.__next__.", "OF ANY KIND, either express or implied. # See the License for the", "if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration =", "ignore ) async def ensure_connected(self): if self.is_closed(): await self.connect() return self async def", "def start(self) -> None: query = self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state", "raise errors.InterfaceError( f'cannot {opname}; the transaction is already rolled back') if self._state is", "if self._protocol is None: return None status = self._protocol.last_status if status is not", "as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >=", "= Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact:", "msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg)", "= self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str):", "an `async with` block') await self._rollback() async def _rollback(self) -> None: query =", "abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result async 
def execute(self,", "+= 1 await asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self): tr =", "cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args =", "aclose(self): if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise", "want to retry, regardless of # the rollback error. # In this case", "_fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query,", "Connection: return await Connection( { \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials,", "abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL,", "NOTE: rollback error is always swallowed, should we use # on_log_message for it?", "= self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done = False self._next_backoff =", "of # the rollback error. 
# In this case we ignore rollback issue", "is TransactionState.STARTED def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname};", "typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self) -> str: return self._params.database def connected_addr(self):", "def _fetchall( self, query: str, *args, __limit__: int = 0, __typeids__: bool =", "def _fetchall_with_headers( self, query: str, *args, __limit__: int = 0, __typeids__: bool =", "else: self._state = TransactionState.STARTED async def commit(self) -> None: if self._managed: raise errors.InterfaceError(", "self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str = None, host: str = None,", "already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;'", "`async with` block') self._managed = True return self async def __aexit__(self, extype, ex,", "4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection = owner self._state", "def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction", "raise con_utils.wrap_error(e) from e except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise", "def __aiter__(self): return self async def __anext__(self): # Note: when changing this code", "str: return self._params.database def connected_addr(self): return self._params.address async def aclose(self): if not self.is_closed():", "EdgeDB open source project. # # Copyright 2016-present MagicStack Inc. and the EdgeDB", "# important, e.g. 
in case `CancelledError` it's important # to propagate it to", "class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact is None: con._top_xact =", "single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if", "con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext): await", "= False if not self.__started: return False try: if extype is None: await", "is None: # On commit we don't know if commit is succeeded before", "enum import functools import random import socket import ssl import time from edgedb", "or agreed to in writing, software # distributed under the License is distributed", "options from edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol import protocol #", "from edgedb import abstract from edgedb import errors from edgedb import con_utils from", "import socket import ssl import time from edgedb import abstract from edgedb import", "= f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is TransactionState.STARTED def __check_state_base(self, opname):", "inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self, query: str,", "self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if", "raise errors.InterfaceError( f'cannot {opname}; the transaction is in error state') def __check_state(self, opname):", "False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection) self._options = 
retry._options.transaction_options", "set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if", "e except BaseException: if tr is not None: tr.close() raise self._protocol = pr", "= None, port: int = None, credentials: str = None, credentials_file: str =", "self._connect_args = connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls", "query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query async", "opname): if self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot", "the EdgeDB authors. # # Licensed under the Apache License, Version 2.0 (the", "password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security, \"wait_until_available\": wait_until_available,", "0 def __init__(self, owner): self._connection = owner self._state = TransactionState.NEW self._managed = False", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self)", "with` block') self._managed = True await self.start() return self async def __aexit__(self, extype,", "self else: # Nested transaction block self._nested = True if self._nested: query =", "pr = await loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await loop.create_connection( protocol_factory,", "self._params.database def connected_addr(self): return self._params.address async def aclose(self): if not self.is_closed(): try: self._protocol.terminate()", "bool = 
False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ):", "License. # You may obtain a copy of the License at # #", "-> Retry: return Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return", "def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners:", "= False self._nested = False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self)", "*addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except ssl.SSLError", "None: await self._rollback() else: await self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor):", "return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__,", "str = None, tls_ca_file: str = None, tls_security: str = None, test_no_tls: bool", "self._options = connection._options def _retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if", "self._managed = True await self.start() return self async def __aexit__(self, extype, ex, tb):", "def __aexit__(self, extype, ex, tb): try: if extype is not None: await self._rollback()", "str = None, credentials_file: str = None, user: str = None, password: str", "query async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already in", "RawTransaction | None = None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args =", "# \"\"\"A 
specialized client API for EdgeDB tests. Historically EdgeDB tests relied on", "await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__,", "None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else: query = 'ROLLBACK;'", "raise con_utils.wrap_error(e) from e except Exception: if tr is not None: tr.close() raise", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx )", "# database have received it or after it have been done but #", "ex, tb): try: if extype is not None: await self._rollback() else: await self._commit()", "= self._params return con def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self,", "if isinstance(addr, str): # UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory, addr", "# type: ignore class TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED =", "import enums as edgedb_enums from edgedb import options from edgedb.protocol import asyncio_proto #", "edgedb import con_utils from edgedb import enums as edgedb_enums from edgedb import options", "the whole task. # NOTE: rollback error is always swallowed, should we use", "rollback error is always swallowed, should we use # on_log_message for it? 
if", "rollback, look at original error # to find out whether we want to", "self._params, loop ) try: if isinstance(addr, str): # UNIX socket tr, pr =", "query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None", "return result async def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result, _", "= 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner):", "were going to rollback, look at original error # to find out whether", "self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is already rolled back')", "License, Version 2.0 (the \"License\"); # you may not use this file except", "already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction is", "= 1 while True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, )", "the rollback error. # In this case we ignore rollback issue as original", "database have received it or after it have been done but # network", "= time.monotonic() if single_attempt: max_time = 0 else: max_time = start + client_config.wait_until_available", "raise errors.InterfaceError( 'cannot manually rollback from within an `async with` block') await self._rollback()", "_fetchall( self, query: str, *args, __limit__: int = 0, __typeids__: bool = False,", "\"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\":", "MagicStack Inc. and the EdgeDB authors. 
# # Licensed under the Apache License,", "__init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache =", "self.start() class Retry: def __init__(self, connection): self._connection = connection self._iteration = 0 self._done", "= False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return", "= await self.raw_query(query_context) return result async def execute(self, query: str) -> None: await", "or (iteration > 1 and time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error(", "if extype is None: await self._commit() else: await self._rollback() except errors.EdgeDBError as err:", "self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str", "await self._rollback() async def _rollback(self) -> None: query = self._make_rollback_query() try: await self._connection.execute(query)", "bool: return self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise", "functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e: #", "Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry =", "not self.__started: return False try: if extype is None: await self._commit() else: await", "if extype is not None: await self._rollback() else: await self._commit() finally: self._managed =", "_query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context) return result async", "not None: tr.close() raise self._protocol = pr def retrying_transaction(self) -> Retry: 
return Retry(self)", "self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return", "we use # on_log_message for it? if ( extype is not None and", "non-retrying). \"\"\" from __future__ import annotations import typing import abc import asyncio import", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "def __aexit__(self, extype, ex, tb): self._managed = False if not self.__started: return False", "None: tr.close() raise con_utils.wrap_error(e) from e except BaseException: if tr is not None:", ") return result async def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result,", "TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact", ") -> Connection: return await Connection( { \"dsn\": dsn, \"host\": host, \"port\": port,", "iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def __init__(self, connect_args,", "and # limitations under the License. 
# \"\"\"A specialized client API for EdgeDB", "while True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError", "and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout}", "self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False,", "True if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;'", "io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self, query: str, *args, __limit__:", "# the rollback error. # In this case we ignore rollback issue as", "{self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact", "True iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin,", "con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if single_attempt: max_time = 0", "ignore class TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED = 2 ROLLEDBACK", "await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort()", "from edgedb import errors from edgedb import con_utils from edgedb import enums as", "not None and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return", "done but # network is dropped before we were able to receive a", "def 
_make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None if", "= None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry,", "error is more # important, e.g. in case `CancelledError` it's important # to", "self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start = time.monotonic() if single_attempt:", "is already rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the", "TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname}; the transaction is in error state') def __check_state(self,", "of the EdgeDB open source project. # # Copyright 2016-present MagicStack Inc. and", "or implied. # See the License for the specific language governing permissions and", "permissions and # limitations under the License. # \"\"\"A specialized client API for", "errors.InterfaceError( 'cannot enter context: already in an `async with` block') self._managed = True", "error state') def __check_state(self, opname): if self._state is not TransactionState.STARTED: if self._state is", "= self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state", "await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED async", "self, query: str, *args, __limit__: int = 0, **kwargs, ): await self.ensure_connected() result,", "= None self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback)", "consider also # updating Retry.__next__. 
if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff)", "import abc import asyncio import enum import functools import random import socket import", "= False async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already", "'cannot start; the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ...", "self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions", "self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await", "self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if iteration > 1 and time.monotonic()", "max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, ))", "__aexit__(self, extype, ex, tb): try: if extype is not None: await self._rollback() else:", "reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query:", "transaction is already rolled back') if self._state is TransactionState.FAILED: raise errors.InterfaceError( f'cannot {opname};", "and are non-retrying). 
\"\"\" from __future__ import annotations import typing import abc import", "if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is not yet", "raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection,", "is None or not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments(", "None, password: str = None, database: str = None, tls_ca: str = None,", "result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, #", "pr = await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr = await", "tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e:", "query = 'START TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact", "commit we don't know if commit is succeeded before the # database have", "self._managed = False if not self.__started: return False try: if extype is None:", "when changing this code consider also # updating Retry.__next__. if self._done: raise StopAsyncIteration", "random import socket import ssl import time from edgedb import abstract from edgedb", "\"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\":", "open source project. # # Copyright 2016-present MagicStack Inc. and the EdgeDB authors.", "Inc. and the EdgeDB authors. 
# # Licensed under the Apache License, Version", "def __init__(self, retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry = retry self.__iteration", "= None, tls_ca: str = None, tls_ca_file: str = None, tls_security: str =", "wait_until_available: int = 30, timeout: int = 10, ) -> Connection: return await", "def async_connect_test_client( dsn: str = None, host: str = None, port: int =", "None, credentials: str = None, credentials_file: str = None, user: str = None,", "use this file except in compliance with the License. # You may obtain", "False async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already in", "rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done = False self._next_backoff", "API for EdgeDB tests. Historically EdgeDB tests relied on a very specific client", "supported by edgedb-python. Here we implement that API (for example, transactions can be", "-> typing.Optional[str]: if self._protocol is None: return None status = self._protocol.last_status if status", "Historically EdgeDB tests relied on a very specific client API that is no", "await self.start() class Retry: def __init__(self, connection): self._connection = connection self._iteration = 0", "asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn:", "rollback from within an `async with` block') await self._rollback() async def _rollback(self) ->", "self._iteration >= rule.attempts: return False self._done = False self._next_backoff = rule.backoff(self._iteration) return True", "governing permissions and # limitations under the License. 
# \"\"\"A specialized client API", "\"\"\" from __future__ import annotations import typing import abc import asyncio import enum", "= None, user: str = None, password: str = None, database: str =", "None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args self._protocol = None", "also # updating Retry.__next__. if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done", "tr.close() raise self._protocol = pr def retrying_transaction(self) -> Retry: return Retry(self) def transaction(self)", "raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from e", "async_connect_test_client( dsn: str = None, host: str = None, port: int = None,", "return await Connection( { \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\":", "for the specific language governing permissions and # limitations under the License. #", "user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security,", "to receive a response raise err # If we were going to rollback,", "connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls", "args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async", "con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All name resolution errors are", "example, transactions can be nested and are non-retrying). 
\"\"\" from __future__ import annotations", "query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore )", "self._commit() else: await self._rollback() except errors.EdgeDBError as err: if ex is None: #", "1 await asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self): tr = None", "ignore rollback issue as original error is more # important, e.g. in case", ")) raise nice_err from e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01 +", "False, wait_until_available: int = 30, timeout: int = 10, ) -> Connection: return", "succeeded before the # database have received it or after it have been", "def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already in an `async", "# In this case we ignore rollback issue as original error is more", "except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def rollback(self)", "before the # database have received it or after it have been done", "loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con", "async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, ) start", "extype is not None: await self._rollback() else: await self._commit() finally: self._managed = False", "if con._top_xact is None: con._top_xact = self else: # Nested transaction block self._nested", "self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, 
reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__,", "is not None: tr.close() raise con_utils.wrap_error(e) from e except BaseException: if tr is", "Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "credentials: str = None, credentials_file: str = None, user: str = None, password:", "return query async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context: already", "-> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async", "# type: ignore from edgedb.protocol import protocol # type: ignore class TransactionState(enum.Enum): NEW", "None: query = self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise", "e: # All name resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e", "at original error # to find out whether we want to retry, regardless", "from edgedb import con_utils from edgedb import enums as edgedb_enums from edgedb import", "0 STARTED = 1 COMMITTED = 2 ROLLEDBACK = 3 FAILED = 4", "None: if self._managed: raise errors.InterfaceError( 'cannot manually commit from within an `async with`", "self._managed: raise errors.InterfaceError( 'cannot manually commit from within an `async with` block') await", "a very specific client API that is no longer supported by edgedb-python. 
Here", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "else: await self._rollback() except errors.EdgeDBError as err: if ex is None: # On", "self._managed: raise errors.InterfaceError( 'cannot enter context: already in an `async with` block') self._managed", "self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self): if self.is_closed(): await", "inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self, query: str, *args,", "inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str, *args, __limit__:", "def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format,", "= pr def retrying_transaction(self) -> Retry: return Retry(self) def transaction(self) -> RawTransaction: return", "\"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security, \"wait_until_available\":", "io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall( self, query:", "ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return", "name resolution errors are considered temporary raise 
errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as", "self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol", "in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args", "str = None, test_no_tls: bool = False, wait_until_available: int = 30, timeout: int", "return result async def _fetchall_with_headers( self, query: str, *args, __limit__: int = 0,", "from e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol(", "asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if", "terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str = None, host:", "not yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError(", "command_timeout=None, server_settings=None, ) start = time.monotonic() if single_attempt: max_time = 0 else: max_time", "ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self,", "and the EdgeDB authors. # # Licensed under the Apache License, Version 2.0", "is not None: tr.close() raise pr.set_connection(self) try: await pr.connect() except OSError as e:", "con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params", "with the License. 
# You may obtain a copy of the License at", "is succeeded before the # database have received it or after it have", "asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if iteration > 1 and", "int = 10, ) -> Connection: return await Connection( { \"dsn\": dsn, \"host\":", "start; the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def", "'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None if self._nested:", "= owner self._state = TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER +=", "await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class Retry: def __init__(self, connection):", "protocol_factory, addr ) elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr) else: try:", "is part of the EdgeDB open source project. 
# # Copyright 2016-present MagicStack", "await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context) return result async def execute(self, query:", "None, host: str = None, port: int = None, credentials: str = None,", "str) -> None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed:", "law or agreed to in writing, software # distributed under the License is", "not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction is", "callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop =", "else: await self._commit() finally: self._managed = False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry,", "if ex is None: # On commit we don't know if commit is", "kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query:", "commit is succeeded before the # database have received it or after it", "import annotations import typing import abc import asyncio import enum import functools import", "but # network is dropped before we were able to receive a response", "= False class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection) self._options =", "STARTED = 1 COMMITTED = 2 ROLLEDBACK = 3 FAILED = 4 class", "= self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state", "in compliance with the License. 
# You may obtain a copy of the", "iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor):", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "self._rollback() except errors.EdgeDBError as err: if ex is None: # On commit we", "= None, tls_security: str = None, test_no_tls: bool = False, wait_until_available: int =", "and issubclass(extype, errors.EdgeDBError) and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def", "is already started') return self._make_start_query_inner() @abc.abstractmethod def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return", "exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False", "async def _fetchall_json( self, query: str, *args, __limit__: int = 0, **kwargs, ):", "__aiter__(self): return self async def __anext__(self): # Note: when changing this code consider", "return self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError(", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query,", "await asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self): tr = None loop", "considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from e except OSError as e: raise con_utils.wrap_error(e) from", "( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic() >= max_time) ): nice_err", "pr def retrying_transaction(self) -> Retry: return 
Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as e: # All name resolution errors", "| None = None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args = connect_args", "self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is already committed') if", "_commit(self) -> None: query = self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state =", "rollback(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback from within an", "the EdgeDB open source project. # # Copyright 2016-present MagicStack Inc. and the", "class Iteration(BaseTransaction, abstract.AsyncIOExecutor): def __init__(self, retry, connection, iteration): super().__init__(connection) self._options = retry._options.transaction_options self.__retry", "ex, tb): self._managed = False if not self.__started: return False try: if extype", "-> None: query = self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED", "On commit we don't know if commit is succeeded before the # database", "def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return None", "state') def __check_state(self, opname): if self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW:", "TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact is None: con._top_xact", "# Copyright 2016-present MagicStack Inc. and the EdgeDB authors. 
# # Licensed under", "+= 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is TransactionState.STARTED", "await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result", "the # database have received it or after it have been done but", "__allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous(", "as edgedb_enums from edgedb import options from edgedb.protocol import asyncio_proto # type: ignore", "manually rollback from within an `async with` block') await self._rollback() async def _rollback(self)", "changing this code consider also # updating Retry.__next__. if self._done: raise StopAsyncIteration if", "within an `async with` block') await self._commit() async def _commit(self) -> None: query", "def _get_query_cache(self) -> abstract.QueryCache: return self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected()", "self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def rollback(self) -> None:", "'cannot manually rollback from within an `async with` block') await self._rollback() async def", "whole task. 
# NOTE: rollback error is always swallowed, should we use #", "self._make_start_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state =", "bool = False, wait_until_available: int = 30, timeout: int = 10, ) ->", "if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'RELEASE SAVEPOINT", "transaction:`\" ) if not self.__started: self.__started = True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration", "extype, ex, tb): self._managed = False if not self.__started: return False try: if", "raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if", "time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\"", "*addr) else: try: tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except", "= False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result, _ =", "self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None self._log_listeners", "edgedb_enums from edgedb import options from edgedb.protocol import asyncio_proto # type: ignore from", "\"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\":", "= None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return query async", "time from edgedb import abstract from edgedb import errors from edgedb import con_utils", "pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( 
tr.get_extra_info('ssl_object') ) except", "query: str) -> None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not", "result async def execute(self, query: str) -> None: await self._ensure_transaction() await self._connection.execute(query) async", "await self._commit() else: await self._rollback() except errors.EdgeDBError as err: if ex is None:", "async def _rollback(self) -> None: query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException:", "this code consider also # updating Retry.__next__. if self._done: raise StopAsyncIteration if self._next_backoff:", "await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__,", "SAVEPOINT {self._id};' else: query = 'ROLLBACK;' return query async def start(self) -> None:", "status def is_closed(self): return self._protocol is None or not self._protocol.connected async def connect(self,", "# updating Retry.__next__. 
if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done =", "class BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection = owner self._state =", "iteration += 1 await asyncio.sleep(0.01 + random.random() * 0.2) async def connect_addr(self): tr", "from e.__cause__ else: return iteration += 1 await asyncio.sleep(0.01 + random.random() * 0.2)", "False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result, _ = await", "\"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\":", "qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "response raise err # If we were going to rollback, look at original", "attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from e.__cause__ else: return iteration +=", "kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def", "return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async", "... 
def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self:", "is None: await self._commit() else: await self._rollback() except errors.EdgeDBError as err: if ex", "transactions are supported. \" \"Use `async with transaction:`\" ) if not self.__started: self.__started", "pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise", "# # This source file is part of the EdgeDB open source project.", "very specific client API that is no longer supported by edgedb-python. Here we", "= False self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return self async def", "status = status.decode() return status def is_closed(self): return self._protocol is None or not", "self._protocol is None or not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config =", "single_attempt=self.__iteration != 0 ) await self.start() class Retry: def __init__(self, connection): self._connection =", "async def connect_addr(self): tr = None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory", "= await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__,", "if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is already started')", "original error # to find out whether we want to retry, regardless of", "self._connection if con._top_xact is None: con._top_xact = self else: # Nested transaction block", "API (for example, transactions can be nested and are non-retrying). 
\"\"\" from __future__", "def __init__(self, connection): self._connection = connection self._iteration = 0 self._done = False self._next_backoff", "is not None: status = status.decode() return status def is_closed(self): return self._protocol is", "client_config.connect_timeout, ) except TimeoutError as e: if iteration > 1 and time.monotonic() >=", "allow_capabilities=edgedb_enums.Capability.ALL, # type: ignore ) async def _fetchall( self, query: str, *args, __limit__:", "con_utils.wrap_error(e) from e except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e)", "self._done = True iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration", "True return self async def __aexit__(self, extype, ex, tb): self._managed = False if", "this file except in compliance with the License. # You may obtain a", "def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction", ") self._test_no_tls = test_no_tls self._params = None self._log_listeners = set() def add_log_listener(self, callback):", "self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def", "0, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs,", "str = None, host: str = None, port: int = None, credentials: str", "raise err # If we were going to rollback, look at original error", "implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query: str, *args, **kwargs):", "msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache", "it have 
been done but # network is dropped before we were able", "self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return con def _get_query_cache(self) -> abstract.QueryCache:", "self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done", "str = None, database: str = None, tls_ca: str = None, tls_ca_file: str", "self._next_backoff = rule.backoff(self._iteration) return True def __aiter__(self): return self async def __anext__(self): #", "err: if ex is None: # On commit we don't know if commit", "rollback issue as original error is more # important, e.g. in case `CancelledError`", "ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except ssl.SSLError as", "con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return con def _get_query_cache(self)", "\" \"Use `async with transaction:`\" ) if not self.__started: self.__started = True if", "if not self.is_closed(): try: self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def", "import enum import functools import random import socket import ssl import time from", "time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() -", "import errors from edgedb import con_utils from edgedb import enums as edgedb_enums from", "_make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested:", "async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context) return", "socket tr, pr = await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr", 
"_get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result,", "__future__ import annotations import typing import abc import asyncio import enum import functools", "self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done = False self._next_backoff = rule.backoff(self._iteration)", "super().__init__() self._connect_args = connect_args self._protocol = None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), )", "and ex.has_tag(errors.SHOULD_RETRY) ): return self.__retry._retry(ex) def _make_start_query_inner(self): return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache:", "qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query: str, *args,", "): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise", "= None if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self):", "None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return query async def", "None or not self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args,", "self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state is TransactionState.STARTED def __check_state_base(self,", "'ROLLBACK;' return query async def start(self) -> None: query = self._make_start_query() try: await", "loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError as e: raise con_utils.wrap_error(e) 
from e", "f\"connecting to {addr} failed in\" f\" {client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "_make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query =", "is always swallowed, should we use # on_log_message for it? if ( extype", "# # Copyright 2016-present MagicStack Inc. and the EdgeDB authors. # # Licensed", "asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__)", "+ random.random() * 0.2) async def connect_addr(self): tr = None loop = asyncio.get_running_loop()", "await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore )", "try: if extype is None: await self._commit() else: await self._rollback() except errors.EdgeDBError as", "query: str) -> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore", "iteration > 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed", "def __init__(self, owner): self._connection = owner self._state = TransactionState.NEW self._managed = False self._nested", "await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, 
# type:", "def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache =", "If we were going to rollback, look at original error # to find", "OSError as e: if tr is not None: tr.close() raise con_utils.wrap_error(e) from e", "self._iteration = 0 self._done = False self._next_backoff = 0 self._options = connection._options def", "required by applicable law or agreed to in writing, software # distributed under", "= e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from", "__init__(self, owner): self._connection = owner self._state = TransactionState.NEW self._managed = False self._nested =", "not None: tr.close() raise con_utils.wrap_error(e) from e except BaseException: if tr is not", "None: query = self._make_commit_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise", "protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try: if isinstance(addr, str): # UNIX", "self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED async def commit(self) -> None:", "qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str,", "return self._options.start_transaction_query() def _get_query_cache(self) -> abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext):", "allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers( self, query: str, *args, __limit__: int", "EdgeDB tests. 
Historically EdgeDB tests relied on a very specific client API that", "None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback from within an `async with`", "def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are supported.", "self.__started: return False try: if extype is None: await self._commit() else: await self._rollback()", "{opname}; the transaction is in error state') def __check_state(self, opname): if self._state is", "issue as original error is more # important, e.g. in case `CancelledError` it's", "'cannot enter context: already in an `async with` block') self._managed = True await", "False if not self.__started: return False try: if extype is None: await self._commit()", "await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self):", "self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params", "ex is None: # On commit we don't know if commit is succeeded", "return True def __aiter__(self): return self async def __anext__(self): # Note: when changing", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "query async def start(self) -> None: query = self._make_start_query() try: await self._connection.execute(query) except", "max_time = 0 else: max_time = start + client_config.wait_until_available iteration = 1 while", "def connect_addr(self): tr = None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory =", "= True return self async def __aexit__(self, extype, ex, tb): self._managed = False", "'cannot enter context: already in an `async with` block') self._managed = True return", "self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, 
qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async", "_clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return None status", "= 'START TRANSACTION;' return query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is", "bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() result, _", "else: max_time = start + client_config.wait_until_available iteration = 1 while True: addr =", "= TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER += 1 self._id =", "result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if self._protocol is None: return", "await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async def ensure_connected(self): if self.is_closed():", "except ssl.CertificateError as e: raise con_utils.wrap_error(e) from e except ssl.SSLError as e: if", "1 COMMITTED = 2 ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER", "return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def __init__(self,", "loop = asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop )", "kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, io_format=protocol.IoFormat.JSON_ELEMENTS, allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def _clear_codecs_cache(self):", "self._iteration += 1 return iteration class Connection(options._OptionsMixin, 
abstract.AsyncIOExecutor): _top_xact: RawTransaction | None =", "except TimeoutError as e: if iteration > 1 and time.monotonic() >= max_time: raise", "*args, __limit__: int = 0, __typeids__: bool = False, __typenames__: bool = False,", "RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return", "self._connection._top_xact = None if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query def", "async def __aexit__(self, extype, ex, tb): try: if extype is not None: await", "self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the", "= asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial( asyncio_proto.AsyncIOProtocol, self._params, loop ) try:", "raise errors.InterfaceError( f'cannot {opname}; the transaction is not yet started') self.__check_state_base(opname) def _make_start_query(self):", "await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class", "if tr is not None: tr.close() raise self._protocol = pr def retrying_transaction(self) ->", "not None: status = status.decode() return status def is_closed(self): return self._protocol is None", "None: return None status = self._protocol.last_status if status is not None: status =", "yet started') self.__check_state_base(opname) def _make_start_query(self): self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot", "return result async def execute(self, query: str) -> None: await self._ensure_transaction() await self._connection.execute(query)", "self.raw_query(query_context) return result async def execute(self, query: str) -> None: await self.ensure_connected() 
await", "int = 0, **kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query,", "__anext__(self): # Note: when changing this code consider also # updating Retry.__next__. if", "received it or after it have been done but # network is dropped", "self.is_closed(): await self.connect() return self async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous(", "def _make_start_query_inner(self): ... def _make_commit_query(self): self.__check_state('commit') return 'COMMIT;' def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact", "e: if tr is not None: tr.close() raise con_utils.wrap_error(e) from e except BaseException:", "self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.COMMITTED async def", "isinstance(addr, str): # UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory, addr )", "= 30, timeout: int = 10, ) -> Connection: return await Connection( {", "raise def terminate(self): if not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str =", "query = f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return query def", "# you may not use this file except in compliance with the License.", "else: return iteration += 1 await asyncio.sleep(0.01 + random.random() * 0.2) async def", "__check_state_base(self, opname): if self._state is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is", "None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore ) async def", "specific client API that is no longer supported by edgedb-python. 
Here we implement", "super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query = f'RELEASE", "single_attempt: max_time = 0 else: max_time = start + client_config.wait_until_available iteration = 1", "= None, host: str = None, port: int = None, credentials: str =", "None, database: str = None, tls_ca: str = None, tls_ca_file: str = None,", "except BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED async def commit(self)", "{opname}; the transaction is already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot", "loop.call_soon(cb, self, msg) def _shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol =", "def _fetchall_json( self, query: str, *args, __limit__: int = 0, **kwargs, ): await", "= True await self.start() return self async def __aexit__(self, extype, ex, tb): try:", "return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one, allow_capabilities=edgedb_enums.Capability.ALL, #", "raise else: self._state = TransactionState.COMMITTED async def rollback(self) -> None: if self._managed: raise", "-> None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise", "self._rollback() async def _rollback(self) -> None: query = self._make_rollback_query() try: await self._connection.execute(query) except", "e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( 
tr.get_extra_info('ssl_object')", "EdgeDB tests relied on a very specific client API that is no longer", "Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self)", "> 1 and time.monotonic() >= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr,", "= None, password: str = None, database: str = None, tls_ca: str =", "tr is not None: tr.close() raise pr.set_connection(self) try: await pr.connect() except OSError as", "f\" {client_config.connect_timeout} sec\" ) from e except errors.ClientConnectionError as e: if ( not", "look at original error # to find out whether we want to retry,", "None: await self._commit() else: await self._rollback() except errors.EdgeDBError as err: if ex is", "errors.InterfaceError( f'cannot {opname}; the transaction is in error state') def __check_state(self, opname): if", "License for the specific language governing permissions and # limitations under the License.", "False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query,", "case we ignore rollback issue as original error is more # important, e.g.", "else: query = 'ROLLBACK;' return query async def start(self) -> None: query =", "\"License\"); # you may not use this file except in compliance with the", "reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query: str,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "# to find out whether we want to retry, regardless of # the", "TransactionState(enum.Enum): NEW = 0 STARTED = 1 COMMITTED = 2 ROLLEDBACK = 3", "self._test_no_tls: tr, pr = await 
loop.create_connection(protocol_factory, *addr) else: try: tr, pr = await", "# UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls:", "is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError( f'cannot {opname}; the transaction", "_ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, )", "return query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact =", "BaseTransaction(abc.ABC): ID_COUNTER = 0 def __init__(self, owner): self._connection = owner self._state = TransactionState.NEW", "result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON,", "errors.InterfaceError( 'cannot manually commit from within an `async with` block') await self._commit() async", "e, addr, attempts=iteration, duration=time.monotonic() - start, )) raise nice_err from e.__cause__ else: return", "result async def _fetchall_with_headers( self, query: str, *args, __limit__: int = 0, __typeids__:", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "# type: ignore ) async def _fetchall( self, query: str, *args, __limit__: int", "self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if iteration", ">= max_time) ): nice_err = e.__class__( con_utils.render_client_no_connection_error( e, addr, attempts=iteration, duration=time.monotonic() - 
start,", "self._connection = owner self._state = TransactionState.NEW self._managed = False self._nested = False type(self).ID_COUNTER", "UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr,", "try: if isinstance(addr, str): # UNIX socket tr, pr = await loop.create_unix_connection( protocol_factory,", "def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context) return result", "await pr.connect() except OSError as e: if tr is not None: tr.close() raise", "False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None, **kwargs, ): await self.ensure_connected()", "-> bool: return self._state is TransactionState.STARTED def __check_state_base(self, opname): if self._state is TransactionState.COMMITTED:", "= f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query() if self._connection._top_xact", "connection._options def _retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >=", "\"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file, \"user\": user, \"password\": password, \"database\":", "\"Use `async with transaction:`\" ) if not self.__started: self.__started = True if self._connection.is_closed():", "self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ = await self._connection.raw_query(query_context)", "import options from edgedb.protocol import asyncio_proto # type: ignore from edgedb.protocol import protocol", "tr.close() raise pr.set_connection(self) try: await pr.connect() except OSError as e: if tr is", "query def _make_commit_query(self): query = super()._make_commit_query() if self._connection._top_xact is self: self._connection._top_xact = None", 
"errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and time.monotonic()", "await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, ) else: con_utils.check_alpn_protocol( tr.get_extra_info('ssl_object') ) except socket.gaierror as", "_shallow_clone(self): con = self.__class__.__new__(self.__class__) con._connect_args = self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache", "con_utils.wrap_error(e) from e except Exception: if tr is not None: tr.close() raise pr.set_connection(self)", "allow_capabilities=__allow_capabilities__, ) async def _fetchall_json( self, query: str, *args, __limit__: int = 0,", "enums as edgedb_enums from edgedb import options from edgedb.protocol import asyncio_proto # type:", "args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) async def _fetchall_json(", "`async with` block') self._managed = True await self.start() return self async def __aexit__(self,", "None, **kwargs, ): await self.ensure_connected() return await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache,", "test_no_tls: bool = False, wait_until_available: int = 30, timeout: int = 10, )", "except OSError as e: raise con_utils.wrap_error(e) from e except Exception: if tr is", "f'cannot {opname}; the transaction is already rolled back') if self._state is TransactionState.FAILED: raise", "self._nested: query = f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query()", "or after it have been done but # network is dropped before we", "import con_utils from edgedb import enums as edgedb_enums from edgedb import 
options from", "await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory, *addr)", "result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__,", "enter context: already in an `async with` block') self._managed = True await self.start()", "Retry.__next__. if self._done: raise StopAsyncIteration if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration", "tests. Historically EdgeDB tests relied on a very specific client API that is", "is already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError( f'cannot {opname}; the transaction", "query = self._make_rollback_query() try: await self._connection.execute(query) except BaseException: self._state = TransactionState.FAILED raise else:", "raise self._protocol = pr def retrying_transaction(self) -> Retry: return Retry(self) def transaction(self) ->", "> 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError( f\"connecting to {addr} failed in\"", "socket.gaierror as e: # All name resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e))", "OSError as e: raise con_utils.wrap_error(e) from e except Exception: if tr is not", "annotations import typing import abc import asyncio import enum import functools import random", "is self: self._connection._top_xact = None if self._nested: query = f'RELEASE SAVEPOINT {self._id};' return", "abc import asyncio import enum import functools import random import socket import ssl", "important, e.g. 
in case `CancelledError` it's important # to propagate it to cancel", "= 10, ) -> Connection: return await Connection( { \"dsn\": dsn, \"host\": host,", "2.0 (the \"License\"); # you may not use this file except in compliance", "e except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1", "block') await self._rollback() async def _rollback(self) -> None: query = self._make_rollback_query() try: await", "# type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]:", "= self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if", "License. # \"\"\"A specialized client API for EdgeDB tests. Historically EdgeDB tests relied", "query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache, io_format=query_context.query_options.io_format, expect_one=query_context.query_options.expect_one, required_one=query_context.query_options.required_one,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the", "inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return result async def _fetchall_json_elements(self, query: str, *args, **kwargs): await", "were able to receive a response raise err # If we were going", "query = f'RELEASE SAVEPOINT {self._id};' return query def _make_rollback_query(self): query = super()._make_rollback_query() if", "in an `async with` block') self._managed = True return self async def __aexit__(self,", "it or after it have been done but # network is dropped before", "self._protocol.terminate() await self._protocol.wait_for_disconnect() except (Exception, asyncio.CancelledError): self.terminate() raise def terminate(self): if not self.is_closed():", "f'cannot {opname}; the transaction is in error state') def __check_state(self, opname): if self._state", "con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return", "BaseException: if tr is not None: tr.close() raise self._protocol = pr def retrying_transaction(self)", "# # Unless required by applicable law or agreed to in writing, software", "'cannot manually commit from within an `async with` block') await self._commit() async def", "express or implied. # See the License for the specific language governing permissions", "is self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};'", "supported. 
\" \"Use `async with transaction:`\" ) if not self.__started: self.__started = True", "= connection self._iteration = 0 self._done = False self._next_backoff = 0 self._options =", "not self.is_closed(): self._protocol.abort() async def async_connect_test_client( dsn: str = None, host: str =", "self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await self.raw_query(query_context)", "type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) -> typing.Optional[str]: if", "either express or implied. # See the License for the specific language governing", "case `CancelledError` it's important # to propagate it to cancel the whole task.", "connect_addr(self): tr = None loop = asyncio.get_running_loop() addr = self._params.address protocol_factory = functools.partial(", "def execute(self, query: str) -> None: await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self):", "is_closed(self): return self._protocol is None or not self._protocol.connected async def connect(self, single_attempt=False): self._params,", "TimeoutError as e: if iteration > 1 and time.monotonic() >= max_time: raise errors.ClientConnectionTimeoutError(", "reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result async def _fetchall_with_headers(", "0, __typeids__: bool = False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] = None,", "= 0 else: max_time = start + client_config.wait_until_available iteration = 1 while True:", "self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: 
loop = asyncio.get_running_loop()", "self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query = 'START TRANSACTION;' return query", "= self._connect_args con._protocol = self._protocol con._query_cache = self._query_cache con._test_no_tls = self._test_no_tls con._params =", "= self._query_cache con._test_no_tls = self._test_no_tls con._params = self._params return con def _get_query_cache(self) ->", "{self._id};' return query async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot enter context:", "str) -> None: await self.ensure_connected() await self._protocol.simple_query( query, edgedb_enums.Capability.ALL # type: ignore )", "the License. # You may obtain a copy of the License at #", "con_utils.wrap_error(e) from e except BaseException: if tr is not None: tr.close() raise self._protocol", "__aexit__(self, extype, ex, tb): self._managed = False if not self.__started: return False try:", "remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb", "def __check_state(self, opname): if self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise", "= await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typenames=False, io_format=protocol.IoFormat.JSON, ) return", "= iteration self.__started = False async def __aenter__(self): if self._managed: raise errors.InterfaceError( 'cannot", "_fetchall_json( self, query: str, *args, __limit__: int = 0, **kwargs, ): await self.ensure_connected()", "e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr = await", "abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None 
self._log_listeners = set()", "if self._iteration >= rule.attempts: return False self._done = False self._next_backoff = rule.backoff(self._iteration) return", "is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is already started') return self._make_start_query_inner()", "codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params = None self._log_listeners = set() def", "`async with` block') await self._commit() async def _commit(self) -> None: query = self._make_commit_query()", "we were able to receive a response raise err # If we were", "_retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return", "allow_capabilities=edgedb_enums.Capability.EXECUTE, # type: ignore ) return result def _clear_codecs_cache(self): self._query_cache.codecs_registry.clear_cache() def _get_last_status(self) ->", "abstract.QueryCache: return self._connection._query_cache async def _query(self, query_context: abstract.QueryContext): await self._ensure_transaction() result, _ =", "self._protocol.last_status if status is not None: status = status.decode() return status def is_closed(self):", "async def commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually commit from", "__check_state(self, opname): if self._state is not TransactionState.STARTED: if self._state is TransactionState.NEW: raise errors.InterfaceError(", "e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr = await loop.create_connection( functools.partial(protocol_factory,", "False type(self).ID_COUNTER += 1 self._id = f'raw_tx_{self.ID_COUNTER}' def is_active(self) -> bool: return self._state", "= await self._connection.raw_query(query_context) return result async def execute(self, query: str) -> None: await", "if self._connection._top_xact is self: 
self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO", "**kwargs, ): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry,", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr = await loop.create_connection(", "= f'ROLLBACK TO SAVEPOINT {self._id};' return query async def __aenter__(self): if self._managed: raise", "str, *args, __limit__: int = 0, **kwargs, ): await self.ensure_connected() result, _ =", ") except TimeoutError as e: if iteration > 1 and time.monotonic() >= max_time:", "self._done = False self._next_backoff = 0 self._options = connection._options def _retry(self, exc): self._last_exception", "iteration = 1 while True: addr = self._params.address try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout,", "else: try: tr, pr = await loop.create_connection( protocol_factory, *addr, ssl=self._params.ssl_ctx ) except ssl.CertificateError", "query: str, *args, **kwargs): await self.ensure_connected() result, _ = await self._protocol.execute_anonymous( query=query, args=args,", "after it have been done but # network is dropped before we were", "0 ) await self.start() class Retry: def __init__(self, connection): self._connection = connection self._iteration", "database: str = None, tls_ca: str = None, tls_ca_file: str = None, tls_security:", "don't know if commit is succeeded before the # database have received it", "await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1", "= None self._query_cache = abstract.QueryCache( codecs_registry=protocol.CodecsRegistry(), query_cache=protocol.QueryCodecsCache(), ) self._test_no_tls = test_no_tls self._params =", "self.ensure_connected() return await 
self._protocol.execute_anonymous( query=query, args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY,", "transaction block self._nested = True if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else:", "on a very specific client API that is no longer supported by edgedb-python.", "to propagate it to cancel the whole task. # NOTE: rollback error is", "# network is dropped before we were able to receive a response raise", "= True if self._connection.is_closed(): await self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class", "self._connection.connect( single_attempt=self.__iteration != 0 ) await self.start() class Retry: def __init__(self, connection): self._connection", "async def start(self) -> None: query = self._make_start_query() try: await self._connection.execute(query) except BaseException:", "self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con = self._connection if con._top_xact is", "= 0, __typeids__: bool = False, __typenames__: bool = False, __allow_capabilities__: typing.Optional[int] =", "self._state = TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con", "self: self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' else:", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "self.__check_state_base('start') if self._state is TransactionState.STARTED: raise errors.InterfaceError( 'cannot start; the transaction is already", "\"database\": database, \"timeout\": timeout, \"tls_ca\": tls_ca, \"tls_ca_file\": tls_ca_file, \"tls_security\": tls_security, \"wait_until_available\": 
wait_until_available, },", "self._protocol = pr def retrying_transaction(self) -> Retry: return Retry(self) def transaction(self) -> RawTransaction:", ") return result async def _fetchall_with_headers( self, query: str, *args, __limit__: int =", "except errors.ClientConnectionError as e: if ( not e.has_tag(errors.SHOULD_RECONNECT) or (iteration > 1 and", "self async def __anext__(self): # Note: when changing this code consider also #", "self._connection, self._iteration) self._iteration += 1 return iteration class Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction |", "= True iteration = Iteration(self, self._connection, self._iteration) self._iteration += 1 return iteration class", "def _make_rollback_query(self): self.__check_state('rollback') if self._connection._top_xact is self: self._connection._top_xact = None if self._nested: query", "BaseException: self._state = TransactionState.FAILED raise else: self._state = TransactionState.STARTED async def commit(self) ->", "None, test_no_tls: bool = False, wait_until_available: int = 30, timeout: int = 10,", "as e: raise con_utils.wrap_error(e) from e except ssl.SSLError as e: if e.reason ==", "is TransactionState.COMMITTED: raise errors.InterfaceError( f'cannot {opname}; the transaction is already committed') if self._state", "return self._query_cache async def _query(self, query_context: abstract.QueryContext): await self.ensure_connected() result, _ = await", "raise errors.InterfaceError( 'cannot start; the transaction is already started') return self._make_start_query_inner() @abc.abstractmethod def", "def _make_start_query_inner(self): con = self._connection if con._top_xact is None: con._top_xact = self else:", "authors. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "self.__iteration = iteration self.__started = False async def __aenter__(self): if self._managed: raise errors.InterfaceError(", "block self._nested = True if self._nested: query = f'DECLARE SAVEPOINT {self._id};' else: query", "dbname(self) -> str: return self._params.database def connected_addr(self): return self._params.address async def aclose(self): if", "_top_xact: RawTransaction | None = None def __init__(self, connect_args, *, test_no_tls=False): super().__init__() self._connect_args", "'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr,", "-> None: if self._managed: raise errors.InterfaceError( 'cannot manually rollback from within an `async", "def retrying_transaction(self) -> Retry: return Retry(self) def transaction(self) -> RawTransaction: return RawTransaction(self) def", "= self._connection if con._top_xact is None: con._top_xact = self else: # Nested transaction", "except ssl.SSLError as e: if e.reason == 'CERTIFICATE_VERIFY_FAILED': raise con_utils.wrap_error(e) from e tr,", "result async def execute(self, query: str) -> None: await self.ensure_connected() await self._protocol.simple_query( query,", "except in compliance with the License. # You may obtain a copy of", "= await loop.create_unix_connection( protocol_factory, addr ) elif self._test_no_tls: tr, pr = await loop.create_connection(protocol_factory,", "self.start() return self async def __aexit__(self, extype, ex, tb): try: if extype is", "regardless of # the rollback error. 
# In this case we ignore rollback", "= 0 STARTED = 1 COMMITTED = 2 ROLLEDBACK = 3 FAILED =", "!= 0 ) await self.start() class Retry: def __init__(self, connection): self._connection = connection", "extype is None: await self._commit() else: await self._rollback() except errors.EdgeDBError as err: if", "important # to propagate it to cancel the whole task. # NOTE: rollback", "False self._next_backoff = 0 self._options = connection._options def _retry(self, exc): self._last_exception = exc", "def _on_log_message(self, msg): if self._log_listeners: loop = asyncio.get_running_loop() for cb in self._log_listeners: loop.call_soon(cb,", "longer supported by edgedb-python. Here we implement that API (for example, transactions can", "from e except Exception: if tr is not None: tr.close() raise pr.set_connection(self) try:", "self._log_listeners = set() def add_log_listener(self, callback): self._log_listeners.add(callback) def remove_log_listener(self, callback): self._log_listeners.discard(callback) def _on_log_message(self,", "code consider also # updating Retry.__next__. if self._done: raise StopAsyncIteration if self._next_backoff: await", "status is not None: status = status.decode() return status def is_closed(self): return self._protocol", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "TransactionState.STARTED async def commit(self) -> None: if self._managed: raise errors.InterfaceError( 'cannot manually commit", "None: status = status.decode() return status def is_closed(self): return self._protocol is None or", "result, _ = await self._connection.raw_query(query_context) return result async def execute(self, query: str) ->", "raise con_utils.wrap_error(e) from e tr, pr = await loop.create_connection( functools.partial(protocol_factory, tls_compat=True), *addr, )", "try: await asyncio.wait_for( self.connect_addr(), client_config.connect_timeout, ) except TimeoutError as e: if iteration >", "is None: con._top_xact = self else: # Nested transaction block self._nested = True", "async def raw_query(self, query_context: abstract.QueryContext): return await self._protocol.execute_anonymous( query=query_context.query.query, args=query_context.query.args, kwargs=query_context.query.kwargs, reg=query_context.cache.codecs_registry, qc=query_context.cache.query_cache,", "result async def _fetchall_json_elements(self, query: str, *args, **kwargs): await self.ensure_connected() result, _ =", "def _retry(self, exc): self._last_exception = exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts:", "self.ensure_connected() result, _ = await self.raw_query(query_context) return result async def execute(self, query: str)", "is_in_transaction(self): return self._protocol.is_in_transaction() def get_settings(self) -> typing.Dict[str, typing.Any]: return self._protocol.get_settings() @property def dbname(self)", "async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only managed retriable transactions are", "await self._ensure_transaction() await self._connection.execute(query) async def _ensure_transaction(self): if not self._managed: raise errors.InterfaceError( \"Only", "query=query, 
args=args, kwargs=kwargs, reg=self._query_cache.codecs_registry, qc=self._query_cache.query_cache, implicit_limit=__limit__, inline_typeids=__typeids__, inline_typenames=__typenames__, io_format=protocol.IoFormat.BINARY, allow_capabilities=__allow_capabilities__, ) return result", "= exc rule = self._options.retry_options.get_rule_for_exception(exc) if self._iteration >= rule.attempts: return False self._done =", "= TransactionState.FAILED raise else: self._state = TransactionState.ROLLEDBACK class RawTransaction(BaseTransaction): def _make_start_query_inner(self): con =", "self._protocol.connected async def connect(self, single_attempt=False): self._params, client_config = con_utils.parse_connect_arguments( **self._connect_args, command_timeout=None, server_settings=None, )", "that is no longer supported by edgedb-python. Here we implement that API (for", "2 ROLLEDBACK = 3 FAILED = 4 class BaseTransaction(abc.ABC): ID_COUNTER = 0 def", "Connection(options._OptionsMixin, abstract.AsyncIOExecutor): _top_xact: RawTransaction | None = None def __init__(self, connect_args, *, test_no_tls=False):", "import functools import random import socket import ssl import time from edgedb import", "retry, regardless of # the rollback error. # In this case we ignore", "if self._next_backoff: await asyncio.sleep(self._next_backoff) self._done = True iteration = Iteration(self, self._connection, self._iteration) self._iteration", "# Note: when changing this code consider also # updating Retry.__next__. if self._done:", "f'cannot {opname}; the transaction is already committed') if self._state is TransactionState.ROLLEDBACK: raise errors.InterfaceError(", "self._connection._top_xact = None if self._nested: query = f'ROLLBACK TO SAVEPOINT {self._id};' return query", "Note: when changing this code consider also # updating Retry.__next__. 
if self._done: raise", "from e except OSError as e: raise con_utils.wrap_error(e) from e except Exception: if", "return False try: if extype is None: await self._commit() else: await self._rollback() except", "as e: # All name resolution errors are considered temporary raise errors.ClientConnectionFailedTemporarilyError(str(e)) from", "await Connection( { \"dsn\": dsn, \"host\": host, \"port\": port, \"credentials\": credentials, \"credentials_file\": credentials_file,", "Retry: def __init__(self, connection): self._connection = connection self._iteration = 0 self._done = False" ]
[ "'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive LDA, 0.5 <=>", "print \"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi)))))", "in [0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5,", "dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0,", "\"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run,", "alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several", "phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta,", "= nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi,", "0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta", "gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod,", "= 200 d = 100 t = 6 beta0 = 0.01 # const", "'verbose': False, 'gamma': .5, # adaptive LDA, 0.5 <=> 1 / n_regularizers }", "phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta',", "= nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm:", "phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm", "= np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, 
prod0, nd,", "0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7,", "\"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print", "run in range(1): seeds = [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for", "[0.1, 0.5, 1.]: params['gamma'] = gamma phi, theta = nmf(collection, t, phi1, theta1,", "alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4,", "generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha,", "=\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print", "results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run,", "= 0.01 # const n_iter = 300 results = open(sys.argv[1], \"w\") for run", "generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma':", "algorithm in ['em','lda']: phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params)", "as np import sys w = 200 d = 100 t = 6", "for run in range(1): seeds = [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]:", "0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]:", "t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0,", "theta) print \"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi',", "0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha", "in range(1): seeds = [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for alpha0", "generate_phi(w,t,beta,seed=seed) 
theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping': False,", "prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params", "print \"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print", "gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta)", "adaptive LDA, 0.5 <=> 1 / n_regularizers } print \"Alpha0:\", alpha0 for algorithm", "params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi,", "= generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping':", "nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive", "open(sys.argv[1], \"w\") for run in range(1): seeds = [30+run,40+run] for alpha0 in [0.01,", "= generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha':", "'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod',", "dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta)))))", "= gamma phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm", "params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, 
theta0)\", dist(theta0,", "= generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False,", "0.5, 1.]: params['gamma'] = gamma phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda',", "= 100 t = 6 beta0 = 0.01 # const n_iter = 300", "import * from algorithms import nmf import numpy as np import sys w", "params['gamma'] = gamma phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params)", "phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi *", "300 results = open(sys.argv[1], \"w\") for run in range(1): seeds = [30+run,40+run] for", "prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run,", "# adaptive LDA, 0.5 <=> 1 / n_regularizers } print \"Alpha0:\", alpha0 for", "1 / n_regularizers } print \"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi, theta", "#for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,", "np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection", "(prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm,", "utils import * from algorithms import nmf import numpy as np import sys", "n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma print", "0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8,", "dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi", "print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi * theta)", "in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4,", 
"algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm,", "* alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection =", "'gamma': .5, # adaptive LDA, 0.5 <=> 1 / n_regularizers } print \"Alpha0:\",", "theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print", "algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma", "theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print", "{ 'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive", "alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0,", "['em','lda']: phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\",", "results = open(sys.argv[1], \"w\") for run in range(1): seeds = [30+run,40+run] for alpha0", "False, 'verbose': False, 'gamma': .5, # adaptive LDA, 0.5 <=> 1 / n_regularizers", "for gamma in [0.1, 0.5, 1.]: params['gamma'] = gamma phi, theta = nmf(collection,", "False, 'gamma': .5, # adaptive LDA, 0.5 <=> 1 / n_regularizers } print", "algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA for gamma in", "alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") print results.close()", "= 
'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0,", "0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8,", "0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta =", "beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive LDA, 0.5 <=> 1", "0.01 # const n_iter = 300 results = open(sys.argv[1], \"w\") for run in", "[0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6,", "'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA for gamma in [0.1,", "adaptive lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\",", "alpha0 in [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.,", "\"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi, theta = nmf(collection, t, phi1, theta1,", "'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of", "= open(sys.argv[1], \"w\") for run in range(1): seeds = [30+run,40+run] for alpha0 in", "100 t = 6 beta0 = 0.01 # const n_iter = 300 results", "d = 100 t = 6 beta0 = 0.01 # const n_iter =", "* theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta)))))", "6 beta0 = 0.01 # const n_iter = 300 results = open(sys.argv[1], \"w\")", "[0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6,", "print \"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi, theta = nmf(collection, t, phi1,", "results.write(\"\\n\") # several versions of adaptive LDA for gamma in [0.1, 0.5, 1.]:", ".5, # adaptive LDA, 0.5 <=> 1 / n_regularizers } print \"Alpha0:\", alpha0", "nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = 
generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params =", "several versions of adaptive LDA for gamma in [0.1, 0.5, 1.]: params['gamma'] =", "0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() *", "theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed)", "LDA for gamma in [0.1, 0.5, 1.]: params['gamma'] = gamma phi, theta =", "t = 6 beta0 = 0.01 # const n_iter = 300 results =", "phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma)", "print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\",", "theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\",", "= [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1,", "phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi)", "[30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2,", "np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 =", "in ['em','lda']: phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print", "params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5,", "collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = {", "'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi)", "n_regularizers } print \"Alpha0:\", alpha0 for algorithm in 
['em','lda']: phi, theta = nmf(collection,", "* beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed)", "alpha0 for algorithm in ['em','lda']: phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm,", "range(1): seeds = [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in", "results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") #", "= 300 results = open(sys.argv[1], \"w\") for run in range(1): seeds = [30+run,40+run]", "beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1]", "= np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1", "0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel()", "\"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\")", "algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod,", "from algorithms import nmf import numpy as np import sys w = 200", "import sys w = 200 d = 100 t = 6 beta0 =", "1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() *", "phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0,", "n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\",", "print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta)", "adaptive LDA for gamma in [0.1, 0.5, 1.]: params['gamma'] = 
gamma phi, theta", "w = 200 d = 100 t = 6 beta0 = 0.01 #", "gamma phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm =", "alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0])", "of adaptive LDA for gamma in [0.1, 0.5, 1.]: params['gamma'] = gamma phi,", "algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi, phi0):\",", "beta0 phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1", "LDA, 0.5 <=> 1 / n_regularizers } print \"Alpha0:\", alpha0 for algorithm in", "alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0, prod0,", "n_iter = 300 results = open(sys.argv[1], \"w\") for run in range(1): seeds =", "seeds = [30+run,40+run] for alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in [0.05,", "algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions", "lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0,", "results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\")", "from utils import * from algorithms import nmf import numpy as np import", "results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA for", "beta0 = 0.01 # const n_iter = 300 results = open(sys.argv[1], \"w\") for", "theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta': beta, 'use_early_stopping': 
False, 'verbose':", "for algorithm in ['em','lda']: phi, theta = nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter,", "# several versions of adaptive LDA for gamma in [0.1, 0.5, 1.]: params['gamma']", "algorithms import nmf import numpy as np import sys w = 200 d", "2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0, theta0,", "1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0 phi0,", "import numpy as np import sys w = 200 d = 100 t", "np import sys w = 200 d = 100 t = 6 beta0", "# const n_iter = 300 results = open(sys.argv[1], \"w\") for run in range(1):", "phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda, gamma", "nmf(collection, t, phi1, theta1, algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\",", "alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA for gamma", "'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive LDA,", "theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'phi', dist(phi0,phi))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\")", "\"w\") for run in range(1): seeds = [30+run,40+run] for alpha0 in [0.01, 0.02,", "for alpha0 in [0.01, 0.02, 0.05]: #for alpha0 in [0.05, 0.1, 0.2, 0.3,", "0.5 <=> 1 / n_regularizers } print \"Alpha0:\", alpha0 for algorithm in ['em','lda']:", "<=> 1 / n_regularizers } print \"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi,", "sys w = 200 d = 100 t = 6 beta0 = 0.01", "0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha =", "= 6 beta0 = 0.01 # const n_iter = 300 results = open(sys.argv[1],", "const n_iter = 300 results = open(sys.argv[1], \"w\") for run in 
range(1): seeds", "algorithm=algorithm, n_iter=n_iter, params=params) print \"Algorithm:\", algorithm print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta,", "phi0, theta0, prod0, nd, collection = generate_all(w,d,t,alpha,beta,seed=seeds[0]) seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 =", "/ n_regularizers } print \"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi, theta =", "alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive LDA, 0.5", "t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print \"Algorithm: adaptive lda,", "theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter, params=params) algorithm = 'adaptive_lda_'+str(gamma) print", "'use_early_stopping': False, 'verbose': False, 'gamma': .5, # adaptive LDA, 0.5 <=> 1 /", "dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA for gamma in [0.1, 0.5,", "results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'theta', dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") print", "gamma in [0.1, 0.5, 1.]: params['gamma'] = gamma phi, theta = nmf(collection, t,", "in [0.1, 0.5, 1.]: params['gamma'] = gamma phi, theta = nmf(collection, t, phi1,", "\"Algorithm: adaptive lda, gamma =\", gamma print \"D(phi, phi0):\", dist(phi0, phi) print \"D(theta,", "} print \"Alpha0:\", alpha0 for algorithm in ['em','lda']: phi, theta = nmf(collection, t,", "\"D(phi, phi0):\", dist(phi0, phi) print \"D(theta, theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist", "1.]: params['gamma'] = gamma phi, theta = nmf(collection, t, phi1, theta1, algorithm='adaptive_lda', n_iter=n_iter,", "= { 'alpha': alpha, 'beta': beta, 'use_early_stopping': False, 'verbose': False, 'gamma': .5, #", "* from algorithms import nmf import numpy as np 
import sys w =", "1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel()", "seed=seeds[1] phi1 = generate_phi(w,t,beta,seed=seed) theta1 = generate_theta(d,t,alpha,seed=seed) params = { 'alpha': alpha, 'beta':", "theta0)\", dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0,", "dist(theta0, theta) print \"D(prod, prod0)\", dist (prod0, phi * theta) results.write(\"\\t\".join(map(str,(run, alpha0, algorithm,", "200 d = 100 t = 6 beta0 = 0.01 # const n_iter", "1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0 beta = np.ones((1,w)).ravel() * beta0", "import nmf import numpy as np import sys w = 200 d =", "versions of adaptive LDA for gamma in [0.1, 0.5, 1.]: params['gamma'] = gamma", "nmf import numpy as np import sys w = 200 d = 100", "0.7, 0.8, 0.9, 1., 1.2,1.4, 1.6, 1.8, 2.]: alpha = np.ones((1,t)).ravel() * alpha0", "results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive LDA", "numpy as np import sys w = 200 d = 100 t =", "dist(theta0,theta))))) results.write(\"\\n\") results.write(\"\\t\".join(map(str,(run, alpha0, algorithm, 'prod', dist(prod0,phi*theta))))) results.write(\"\\n\") # several versions of adaptive" ]
[ "json import requests from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions", ",\" driver.quit() json_output = json_output[:-1] json_output += \" ]\" data = {'payload': json_output}", "bridge_id = split_item[0] bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status,", "= split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString()", "driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in list_items: split_item", "NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless')", "webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections')", "from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api =", "= 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5)", "Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status = 
split_item[2] next_arrival = split_item[3] canal_bridge", "\"[ \" for item in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:',", "'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window()", "json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data) print(json_output) except: print('An error occurred.')", "driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output", "json_output = \"[ \" for item in list_items: split_item = item.text.replace('Bridge ', '').replace(", "next_arrival) json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1] json_output +=", "driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \"", "selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source =", "welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options)", "= webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') 
chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements =", "webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[", "Keys from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options", "import json import requests from selenium import webdriver from selenium.webdriver.common.keys import Keys from", "import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox')", "json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1] json_output += \"", "canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu')", "bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output +=", "driver.quit() json_output = json_output[:-1] json_output += \" ]\" data = {'payload': json_output} update_status_url", "= split_item[0] bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival)", "bridge_status, next_arrival) 
json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1] json_output", "selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech'", "'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status =", "chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\")", "'').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status = split_item[2] next_arrival", "= \"[ \" for item in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge", "list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in list_items: split_item = item.text.replace('Bridge ',", "split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines()", "chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items =", "= json_output[:-1] json_output += \" ]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status'", "]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data) print(json_output)", "canal_bridge = 
bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output", "= {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data) print(json_output) except: print('An", "webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api", "selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions()", "from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options =", "list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in", "import time import bridge import json import requests from selenium import webdriver from", "list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ',", "next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString() + \"", "'').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status = split_item[2] next_arrival = split_item[3]", "bridge import json import requests from selenium import webdriver from selenium.webdriver.common.keys import Keys", "= 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options 
= webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver", "+= canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1] json_output += \" ]\"", "+= \" ]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url,", "= webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output =", "item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id =", "json_output += \" ]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request =", "', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0]", "json_output[:-1] json_output += \" ]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request", "driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item", "', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status = split_item[2] next_arrival =", "'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver =", "import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions 
import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E'", "driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for", "bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1]", "+ \" ,\" driver.quit() json_output = json_output[:-1] json_output += \" ]\" data =", "for item in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ',", "list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in list_items: split_item =", "\" for item in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status:", "= bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString() + \" ,\" driver.quit() json_output =", "canal_bridge.toJsonString() + \" ,\" driver.quit() json_output = json_output[:-1] json_output += \" ]\" data", "{'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data) print(json_output) except: print('An error", "in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival:", "import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source = 'http://www.greatlakes-seaway.com/R2/jsp/mNiaBrdgStatus.jsp?language=E' welland_canal_api = 'https://wellandcanalapi.kaluba.tech' try:", "Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status = split_item[2]", "split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, 
next_arrival) json_output += canal_bridge.toJsonString() +", "chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements = driver.find_element_by_css_selector('div.sections') list_items", "item in list_items: split_item = item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next", "from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException canal_web_source", "= split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += canal_bridge.toJsonString() + \" ,\"", "split_item[0] bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output", "json_output = json_output[:-1] json_output += \" ]\" data = {'payload': json_output} update_status_url =", "try: chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source)", "= item.text.replace('Bridge ', '').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id", "data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data) print(json_output) except:", "import bridge import json import requests from selenium import webdriver from selenium.webdriver.common.keys import", "split_item[3] canal_bridge = bridge.Bridge(bridge_id, bridge_status, next_arrival) json_output += 
canal_bridge.toJsonString() + \" ,\" driver.quit()", "chrome_options = webdriver.ChromeOptions() chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') driver = webdriver.Chrome(chrome_options=chrome_options) driver.implicitly_wait(5) driver.maximize_window() driver.get(canal_web_source) list_elements", "\" ,\" driver.quit() json_output = json_output[:-1] json_output += \" ]\" data = {'payload':", "', '').splitlines() bridge_id = split_item[0] bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge =", "= driver.find_element_by_css_selector('div.sections') list_items = list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in list_items:", "'').splitlines() bridge_id = split_item[0] bridge_status = split_item[2] next_arrival = split_item[3] canal_bridge = bridge.Bridge(bridge_id,", "'').replace( 'Bridge Status:', '').replace('Status: ', '').replace('Next Arrival: ', '').splitlines() bridge_id = split_item[0] bridge_status", "import requests from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import", "requests from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException", "= list_elements.find_elements_by_tag_name(\"li\") json_output = \"[ \" for item in list_items: split_item = item.text.replace('Bridge", "time import bridge import json import requests from selenium import webdriver from selenium.webdriver.common.keys", "\" ]\" data = {'payload': json_output} update_status_url = welland_canal_api+'/update_bridge_status' request = requests.post(url=update_status_url, data=data)" ]
[ "modular source code checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional =", "'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml", "repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \" args:", "https://github.com/psf/black\\n\" \" rev: 21.5b2 # this is a rev\\n\" \" hooks:\\n\" \" -", "( \"repos:\\n\" \" # local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \"", "args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this is a", "'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the", "pre_config_repo = next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None", "of the repo \"\"\" with open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo", "from typing import Optional import yaml # A lock file LOCK_CONTENT = (", "\"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n'", "\">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful", "https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\"", "= \"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version", "# local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\"", "# A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version =", "comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id:", "https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\"", "version of the repo \"\"\" with open(filename, \"r\") as stream: 
pre_commit_data = yaml.safe_load(stream)", "\" entry: swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \"", "\"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n' 'category =", "= \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n' 'category", "'description = \"pytest: simple powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional =", "# another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \"", "repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \" args:", "code checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions", "hooks:\\n\" \" - id: black\\n\" \" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\"", "\" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\"", "= \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version", "\"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT =", "[--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the version (i.e.,", "- id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) ->", "= \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version", "'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n'", "\" hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\"", "\"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing", "code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 
'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name", "swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \" - repo:", "repo \"\"\" with open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next(", "id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) -> Optional[str]:", "the version (i.e., rev) of a repo Args: filename (str): .pre-commit-config.yaml repo (str):", "import yaml # A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n'", "str) -> Optional[str]: \"\"\"Return the version (i.e., rev) of a repo Args: filename", "'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with Python\"\\n' 'category =", "\" # local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \" - id:", "hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo:", "repo: str) -> Optional[str]: \"\"\"Return the version (i.e., rev) of a repo Args:", "\"\"\" with open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item", "Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) # A", "pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if pre_config_repo: return pre_config_repo[\"rev\"] return None", "= ( \"repos:\\n\" \" # local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\"", "local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \"", "typing for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\"", "repo Args: filename (str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the version", "\"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version =", "[--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this 
is a rev\\n\"", "\" - id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str)", "another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" -", "get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the version (i.e., rev) of a", "\"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version =", "= \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with Python\"\\n'", "item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if pre_config_repo: return pre_config_repo[\"rev\"]", "static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n'", "https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\"", "'description = \"the modular source code checker: pep8 pyflakes and co\"\\n' 'category =", "hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \"", "\" - id: mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev:", "yaml # A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version", "= \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" #", "pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n'", "- repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \" name: sync with", "\"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional =", "rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \" # another repo\\n\" \" -", "next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if", "file CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\" \" - repo: local\\n\"", "URL Returns: Optional[str]: the version of 
the repo \"\"\" with open(filename, \"r\") as", "- id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2", "args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the version", "# mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" -", "= \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the modular source code checker: pep8", "= \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with Python\"\\n' 'category = \"dev\"\\n'", "= \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple", "rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" -", "- repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \"", "co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name =", "sync\\n\" \" name: sync with poetry\\n\" \" entry: swp\\n\" \" language: system\\n\" \"", "hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \" name:", "\" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the", "filename (str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the version of the", "repo URL Returns: Optional[str]: the version of the repo \"\"\" with open(filename, \"r\")", "Returns: Optional[str]: the version of the repo \"\"\" with open(filename, \"r\") as stream:", "= next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None )", "\"\"\"Return the version (i.e., rev) of a repo Args: filename (str): .pre-commit-config.yaml repo", "'description = \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 'python-versions", "with poetry\\n\" \" entry: swp\\n\" \" 
language: system\\n\" \" files: poetry.lock\\n\" \" #", "# A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\" \"", "= false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description", "\"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\"", "= \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The uncompromising", "rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\" ) def", "'version = \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional", "LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional", "pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\"", "false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description =", "repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id:", "\"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n'", "\"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional", "name: sync with poetry\\n\" \" entry: swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\"", "local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \" name: sync with poetry\\n\" \"", "v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\" \" - repo:", "\"the modular source code checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional", "\"Optional static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions =", "<gh_stars>1-10 from typing import Optional import yaml # A 
lock file LOCK_CONTENT =", "\" hooks:\\n\" \" - id: black\\n\" \" # another repo\\n\" \" - repo:", "\" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\"", "import Optional import yaml # A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name", "as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for item in pre_commit_data[\"repos\"]", ".pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\" \" - repo:", "is a rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \" # another repo\\n\"", ") def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the version (i.e., rev)", "- id: black\\n\" \" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev:", "entry: swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \" -", "file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description =", "\"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with Python\"\\n' 'category", "( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static typing", "powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n'", "pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"]", "\"pytest: simple powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions", "id: sync\\n\" \" name: sync with poetry\\n\" \" entry: swp\\n\" \" language: system\\n\"", "a repo Args: filename (str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the", "formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name =", "'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the 
modular source code checker:", "\" files: poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\"", "with open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for", "- id: mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\"", "poetry\\n\" \" entry: swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\"", "mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" - id:", "the version of the repo \"\"\" with open(filename, \"r\") as stream: pre_commit_data =", "= \"pytest: simple powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\"", "\">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" # local", "repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \" #", "21.5b2 # this is a rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \"", "(i.e., rev) of a repo Args: filename (str): .pre-commit-config.yaml repo (str): repo URL", "'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \"", "\" - repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \" name: sync", "'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category", "= \"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category =", "\" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \"", "\"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT", ") # A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\"", "hooks:\\n\" \" - id: sync\\n\" \" name: sync with poetry\\n\" \" entry: swp\\n\"", 
"\"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n'", "repo: local\\n\" \" hooks:\\n\" \" - id: sync\\n\" \" name: sync with poetry\\n\"", "= \"4.0.1\"\\n' 'description = \"the modular source code checker: pep8 pyflakes and co\"\\n'", "Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name =", "\"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the modular source code", "for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name", "\" - id: sync\\n\" \" name: sync with poetry\\n\" \" entry: swp\\n\" \"", "\" language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\"", "= false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description", "\"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n'", "\"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static typing for", "# comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" -", "repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this is a rev\\n\" \" hooks:\\n\" \"", "\" hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str,", "'category = \"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n'", "'description = \"Optional static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\"", "5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename:", "\" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 
21.5b2 # this is", "checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions =", "'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n'", "\" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\"", "repo (str): repo URL Returns: Optional[str]: the version of the repo \"\"\" with", "\" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \"", "testing with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' )", "-> Optional[str]: \"\"\"Return the version (i.e., rev) of a repo Args: filename (str):", "with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) #", "= ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description = \"Optional static", "str, repo: str) -> Optional[str]: \"\"\"Return the version (i.e., rev) of a repo", "= \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file", "Args: filename (str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the version of", "\" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev:", "Optional[str]: the version of the repo \"\"\" with open(filename, \"r\") as stream: pre_commit_data", "sync with poetry\\n\" \" entry: swp\\n\" \" language: system\\n\" \" files: poetry.lock\\n\" \"", "this is a rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \" # another", "Optional[str]: \"\"\"Return the version (i.e., rev) of a repo Args: filename (str): .pre-commit-config.yaml", "= false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description", "\">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the modular 
source", "false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\"", "poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\"", "CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\" \" - repo: local\\n\" \"", "hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \"", "'version = \"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n' 'category = \"dev\"\\n'", "rev) of a repo Args: filename (str): .pre-commit-config.yaml repo (str): repo URL Returns:", "\" - id: black\\n\" \" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \"", "= \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the modular", "\" hooks:\\n\" \" - id: sync\\n\" \" name: sync with poetry\\n\" \" entry:", "(str): repo URL Returns: Optional[str]: the version of the repo \"\"\" with open(filename,", "stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for item in pre_commit_data[\"repos\"] if", "'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest: simple powerful testing with", "= yaml.safe_load(stream) pre_config_repo = next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"] ==", "open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for item", "- id: sync\\n\" \" name: sync with poetry\\n\" \" entry: swp\\n\" \" language:", "A .pre-commit-config.yaml file CONFIG_CONTENT = ( \"repos:\\n\" \" # local hooks\\n\" \" -", "id: mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \"", "\"repos:\\n\" \" # local hooks\\n\" \" - repo: local\\n\" \" hooks:\\n\" \" -", "def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return the version (i.e., rev) of", "false\\n\" 
'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description =", "rev: 21.5b2 # this is a rev\\n\" \" hooks:\\n\" \" - id: black\\n\"", "id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 #", "yaml.safe_load(stream) pre_config_repo = next( (item for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo),", "rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\" \" -", "id: black\\n\" \" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\"", "version (i.e., rev) of a repo Args: filename (str): .pre-commit-config.yaml repo (str): repo", "A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n'", "language: system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \"", "- repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \"", "= \"0.910\"\\n' 'description = \"Optional static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional", "source code checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\"", "files: poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev: v0.812\\n\" \"", "\" rev: v0.812\\n\" \" hooks:\\n\" \" - id: mypy\\n\" \" # comment\\n\" \"", "- repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \"", "= \"the modular source code checker: pep8 pyflakes and co\"\\n' 'category = \"dev\"\\n'", "\" rev: 3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \"", "\" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\" \" args: [--filter-files]\\n\" )", "lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name = \"mypy\"\\n' 'version = \"0.910\"\\n' 'description", "\"dev\"\\n' \"optional = false\\n\" 
'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version =", "= false\\n\" 'python-versions = \">=3.6\"\\n' ) # A .pre-commit-config.yaml file CONFIG_CONTENT = (", "'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n'", "\" hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\"", "'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\" 'name = \"pytest\"\\n' 'version = \"6.2.5\"\\n' 'description = \"pytest:", "# this is a rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \" #", "in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if pre_config_repo: return pre_config_repo[\"rev\"] return", "uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n' \"[[package]]\\n\"", "\" name: sync with poetry\\n\" \" entry: swp\\n\" \" language: system\\n\" \" files:", "mypy\\n\" \" # comment\\n\" \" - repo: https://github.com/pycqa/flake8\\n\" \" rev: 3.9.0\\n\" \" hooks:\\n\"", "system\\n\" \" files: poetry.lock\\n\" \" # mypy\\n\" \" - repo: https://github.com/pre-commit/mirrors-mypy\\n\" \" rev:", "= \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional =", "- repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this is a rev\\n\" \" hooks:\\n\"", "simple powerful testing with Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions =", "false\\n\" 'python-versions = \">=3.5\"\\n' \"[[package]]\\n\" 'name = \"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description =", "\" rev: 21.5b2 # this is a rev\\n\" \" hooks:\\n\" \" - id:", "(str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the version of the repo", "a rev\\n\" \" hooks:\\n\" \" - id: black\\n\" \" # another repo\\n\" \"", "\"flake8\"\\n' 'version = \"4.0.1\"\\n' 'description = \"the modular source 
code checker: pep8 pyflakes", ".pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]: the version of the repo \"\"\"", "= \"Optional static typing for Python\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions", "\" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \" hooks:\\n\" \" - id: isort\\n\"", "of a repo Args: filename (str): .pre-commit-config.yaml repo (str): repo URL Returns: Optional[str]:", "\"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo = next( (item for item in", "= \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 'python-versions =", "Optional import yaml # A lock file LOCK_CONTENT = ( \"[[package]]\\n\" 'name =", "'version = \"4.0.1\"\\n' 'description = \"the modular source code checker: pep8 pyflakes and", "'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The", "black\\n\" \" # another repo\\n\" \" - repo: https://github.com/pycqa/isort\\n\" \" rev: 5.10.1\\n\" \"", "\"4.0.1\"\\n' 'description = \"the modular source code checker: pep8 pyflakes and co\"\\n' 'category", "and co\"\\n' 'category = \"dev\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6\"\\n' \"[[package]]\\n\" 'name", "flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this", "\" - repo: https://github.com/psf/black\\n\" \" rev: 21.5b2 # this is a rev\\n\" \"", "the repo \"\"\" with open(filename, \"r\") as stream: pre_commit_data = yaml.safe_load(stream) pre_config_repo =", "(item for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if pre_config_repo:", "for item in pre_commit_data[\"repos\"] if item[\"repo\"] == repo), None ) if pre_config_repo: return", "3.9.0\\n\" \" hooks:\\n\" \" - id: flake8\\n\" \" args: [--max-line-length=88]\\n\" \" - repo:", "\">=3.6\"\\n' \"[[package]]\\n\" 'name = \"black\"\\n' 
'version = \"21.11b1\"\\n' 'description = \"The uncompromising code", "\"black\"\\n' 'version = \"21.11b1\"\\n' 'description = \"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n'", "typing import Optional import yaml # A lock file LOCK_CONTENT = ( \"[[package]]\\n\"", "isort\\n\" \" args: [--filter-files]\\n\" ) def get_repo_version(filename: str, repo: str) -> Optional[str]: \"\"\"Return", "\"The uncompromising code formatter.\"\\n' 'category = \"main\"\\n' \"optional = false\\n\" 'python-versions = \">=3.6.2\"\\n'" ]
[ "self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper", "BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist =", "= talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper - lower print(margin) self.assertEqual(len(margin),", "sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import talib import numpy as np", "as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream = BinanceStream() def", "hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper, middle,", "def setUp(self) -> None: super().setUp() self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history()", "os, sys sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import talib import numpy", "200) def test_analysis(self): hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2,", "super().setUp() self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def", "setUp(self) -> None: super().setUp() self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist)", "99, 2, 2, 0) margin = upper - lower print(margin) self.assertEqual(len(margin), len(hist)) unittest.main()", "import BinanceStream import talib import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) ->", "def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history()", "= BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) 
def test_analysis(self): hist", "binance_stream import BinanceStream import talib import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self)", "test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper,", "print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist),", "self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper, middle, lower =", "lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper - lower print(margin)", "+ \"\\\\src\") from binance_stream import BinanceStream import talib import numpy as np class", "numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream = BinanceStream()", "np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream = BinanceStream() def test_history(self):", "= self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin =", "sys sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import talib import numpy as", "= self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper, middle, lower", "upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper -", "None: super().setUp() self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200)", "def test_analysis(self): hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2,", "BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream = BinanceStream() def 
test_history(self): hist =", "from binance_stream import BinanceStream import talib import numpy as np class BinanceStreamTests(unittest.TestCase): def", "import talib import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp()", "talib import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream", "middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper - lower", "unittest, os, sys sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import talib import", "<reponame>iswanlun/PatternTradingBot<gh_stars>0 import unittest, os, sys sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import", "\"\\\\src\") from binance_stream import BinanceStream import talib import numpy as np class BinanceStreamTests(unittest.TestCase):", "self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist), 200) def test_analysis(self):", "class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream = BinanceStream() def test_history(self): hist", "hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin", "import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None: super().setUp() self.stream =", "self.assertEqual(len(hist), 200) def test_analysis(self): hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99,", "talib.BBANDS(np.array(hist), 99, 2, 2, 0) margin = upper - lower print(margin) self.assertEqual(len(margin), len(hist))", "test_analysis(self): hist = self.stream.return_history() upper, middle, lower = talib.BBANDS(np.array(hist), 99, 2, 2, 0)", "BinanceStream import talib import numpy as np class BinanceStreamTests(unittest.TestCase): def setUp(self) -> None:", "-> None: 
super().setUp() self.stream = BinanceStream() def test_history(self): hist = self.stream.return_history() print(hist) self.assertEqual(len(hist),", "import unittest, os, sys sys.path.append(os.getcwd() + \"\\\\src\") from binance_stream import BinanceStream import talib" ]
[ "for i in input().split()] result = 0 if command == \"Odd\": # odd_numbers", "* len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) *", "command == \"Even\": # even_numbers = list(filter(lambda x: x % 2 == 0,", "sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers) elif command ==", "# odd_numbers = list(filter(lambda x: x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers)", "# even_numbers = list(filter(lambda x: x % 2 == 0, list_of_numbers)) # print(sum(even_numbers)", "print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 != 0, list_of_numbers)))", "odd_numbers = list(filter(lambda x: x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers) *", "len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 == 0, list_of_numbers))) * len(list_of_numbers)", "= sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers) elif command", "# print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 != 0,", "result = sum(list(filter(lambda x: x % 2 == 0, list_of_numbers))) * len(list_of_numbers) print(result)", "== \"Odd\": # odd_numbers = list(filter(lambda x: x % 2 != 0, list_of_numbers))", "x % 2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda", "% 2 != 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x:", "2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x", "0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2", "len(list_of_numbers) elif command == \"Even\": # even_numbers = list(filter(lambda x: x % 2", "= [int(i) for i in input().split()] result = 0 if command == \"Odd\":", "i in input().split()] result = 0 if command == \"Odd\": # odd_numbers =", "\"Even\": # even_numbers = 
list(filter(lambda x: x % 2 == 0, list_of_numbers)) #", "= 0 if command == \"Odd\": # odd_numbers = list(filter(lambda x: x %", "even_numbers = list(filter(lambda x: x % 2 == 0, list_of_numbers)) # print(sum(even_numbers) *", "list(filter(lambda x: x % 2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result", "print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 == 0, list_of_numbers)))", "* len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 == 0, list_of_numbers))) *", "0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": # even_numbers = list(filter(lambda x:", "\"Odd\": # odd_numbers = list(filter(lambda x: x % 2 != 0, list_of_numbers)) #", "== \"Even\": # even_numbers = list(filter(lambda x: x % 2 == 0, list_of_numbers))", "!= 0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": # even_numbers = list(filter(lambda", "list_of_numbers = [int(i) for i in input().split()] result = 0 if command ==", "list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 ==", "result = 0 if command == \"Odd\": # odd_numbers = list(filter(lambda x: x", "result = sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers) elif", "len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers)", "2 != 0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": # even_numbers =", "# print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 == 0,", "in input().split()] result = 0 if command == \"Odd\": # odd_numbers = list(filter(lambda", "x: x % 2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result =", "0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2", "input().split()] result = 0 if command == \"Odd\": # odd_numbers = 
list(filter(lambda x:", "input() list_of_numbers = [int(i) for i in input().split()] result = 0 if command", "= list(filter(lambda x: x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers))", "= list(filter(lambda x: x % 2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers))", "x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda", "[int(i) for i in input().split()] result = 0 if command == \"Odd\": #", "= input() list_of_numbers = [int(i) for i in input().split()] result = 0 if", "list(filter(lambda x: x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result", "x: x % 2 != 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result =", "x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\":", "list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": # even_numbers = list(filter(lambda x: x", "* len(list_of_numbers) elif command == \"Even\": # even_numbers = list(filter(lambda x: x %", "command = input() list_of_numbers = [int(i) for i in input().split()] result = 0", "== 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x %", "list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x % 2 !=", "!= 0, list_of_numbers)) # print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x %", "elif command == \"Even\": # even_numbers = list(filter(lambda x: x % 2 ==", "% 2 != 0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": # even_numbers", "command == \"Odd\": # odd_numbers = list(filter(lambda x: x % 2 != 0,", "if command == \"Odd\": # odd_numbers = list(filter(lambda x: x % 2 !=", "% 2 == 0, list_of_numbers)) # print(sum(even_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x:", "2 != 0, list_of_numbers)) # 
print(sum(odd_numbers) * len(list_of_numbers)) result = sum(list(filter(lambda x: x", "x % 2 != 0, list_of_numbers))) * len(list_of_numbers) elif command == \"Even\": #", "0 if command == \"Odd\": # odd_numbers = list(filter(lambda x: x % 2" ]
[ "DateTimeField from wx.app import database from wx.models.station import Station class Report(database.Model): station =", "import database from wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp", "from peewee import ForeignKeyField, DateTimeField from wx.app import database from wx.models.station import Station", "database from wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp =", "class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now) class Meta: order_by =", "datetime import datetime from peewee import ForeignKeyField, DateTimeField from wx.app import database from", "Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now) class Meta: order_by = ('-timestamp',)", "datetime from peewee import ForeignKeyField, DateTimeField from wx.app import database from wx.models.station import", "ForeignKeyField, DateTimeField from wx.app import database from wx.models.station import Station class Report(database.Model): station", "import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now) class Meta:", "peewee import ForeignKeyField, DateTimeField from wx.app import database from wx.models.station import Station class", "import ForeignKeyField, DateTimeField from wx.app import database from wx.models.station import Station class Report(database.Model):", "from wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now)", "from datetime import datetime from peewee import ForeignKeyField, DateTimeField from wx.app import database", "import datetime from peewee 
import ForeignKeyField, DateTimeField from wx.app import database from wx.models.station", "wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now) class", "from wx.app import database from wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station,", "Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports') timestamp = DateTimeField(default=datetime.now) class Meta: order_by", "wx.app import database from wx.models.station import Station class Report(database.Model): station = ForeignKeyField(Station, related_name='reports')" ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "= is_public self.__service_url = url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self,", "jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result = requests.get(url_base,", "KIND, either express or implied. # See the License for the specific language", "provided as-is, # without warranty or representation for any use or purpose.# #", "Unless required by applicable law or agreed to in writing, software # distributed", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "LLC. This software is provided as-is, # without warranty or representation for any", "json import os import urllib import requests class RestHelper(object): def __init__(self, url, timeout=30,", "http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer", "get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers',", "License. 
# You may obtain a copy of the License at # #", "# import sys def main(argv): pass if __name__ == '__main__': main(sys.argv) import httplib2", "in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for", "params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in", "self.__is_public = is_public self.__service_url = url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def", "logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] =", "return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code,", "+ jwt # print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers)", "# # Copyright 2019 Google LLC # # Licensed under the Apache License,", "= 'Bearer ' + jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers)", "law or agreed to in writing, software # distributed under the License is", "specific language governing permissions and # limitations under the License. # # Copyright", "the License. # # Copyright 2019 Google LLC. This software is provided as-is,", "the License for the specific language governing permissions and # limitations under the", "'Bearer ' + jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) #", "# Copyright 2019 Google LLC. 
This software is provided as-is, # without warranty", "result.text) return result.text raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text))", "Copyright 2019 Google LLC. This software is provided as-is, # without warranty or", "compliance with the License. # You may obtain a copy of the License", "get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer", "result.text raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def", "RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log = 1 self.__debug = True self.__is_public", "{} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={}", "logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) return", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "__init__(self, url, timeout=30, is_public=True): self.__log = 1 self.__debug = True self.__is_public = is_public", "timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test',", "this file except in compliance with the License. 
# You may obtain a", "headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code)", "purpose.# # import sys def main(argv): pass if __name__ == '__main__': main(sys.argv) import", "HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30):", "jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers',", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP - {} -", "you may not use this file except in compliance with the License. #", "as-is, # without warranty or representation for any use or purpose.# # import", "main(argv): pass if __name__ == '__main__': main(sys.argv) import httplib2 import logging import json", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "== '__main__': main(sys.argv) import httplib2 import logging import json import os import urllib", "= httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer '", "Google LLC # # Licensed under the Apache License, Version 2.0 (the \"License\");", "ANY KIND, either express or implied. 
# See the License for the specific", "def __init__(self, url, timeout=30, is_public=True): self.__log = 1 self.__debug = True self.__is_public =", "= timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization']", "timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization']", "any use or purpose.# # import sys def main(argv): pass if __name__ ==", "is_public self.__service_url = url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None,", "str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose Failure for HTTP", "result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text))", "raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base,", "in compliance with the License. 
# You may obtain a copy of the", "jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose Failure", "- {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start')", "Error') raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) return None", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the", "use this file except in compliance with the License. # You may obtain", "pass if __name__ == '__main__': main(sys.argv) import httplib2 import logging import json import", "# print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "print('get_test', result.text) return result.text raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code,", "not use this file except in compliance with the License. # You may", "' + jwt # print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base,", "Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0", "= headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test',", "language governing permissions and # limitations under the License. # # Copyright 2019", "and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose Failure for", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "'__main__': main(sys.argv) import httplib2 import logging import json import os import urllib import", "return result.text raise Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod", "2019 Google LLC. This software is provided as-is, # without warranty or representation", "warranty or representation for any use or purpose.# # import sys def main(argv):", "print('get_text') if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result", "urllib import requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log = 1", "under the License. # # Copyright 2019 Google LLC. 
This software is provided", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: #", "headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt:", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "for any use or purpose.# # import sys def main(argv): pass if __name__", "import sys def main(argv): pass if __name__ == '__main__': main(sys.argv) import httplib2 import", "import httplib2 import logging import json import os import urllib import requests class", "# print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code", "import requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log = 1 self.__debug", "@staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection):", "OF ANY KIND, either express or implied. # See the License for the", "Google LLC. 
This software is provided as-is, # without warranty or representation for", "logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers)", "2.0 (the \"License\"); # you may not use this file except in compliance", "jwt # print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers) #", "headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text)", "# you may not use this file except in compliance with the License.", "and # limitations under the License. # # Copyright 2019 Google LLC. This", "without warranty or representation for any use or purpose.# # import sys def", "use or purpose.# # import sys def main(argv): pass if __name__ == '__main__':", "def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer ' + jwt", "jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if", "for the specific language governing permissions and # limitations under the License. 
#", "= requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]:", "agreed to in writing, software # distributed under the License is distributed on", "self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt:", "collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params))", "= 'Bearer ' + jwt # print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout,", "# # Copyright 2019 Google LLC. This software is provided as-is, # without", "import json import os import urllib import requests class RestHelper(object): def __init__(self, url,", "= url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text')", "True self.__is_public = is_public self.__service_url = url self.__timeout = timeout http = httplib2.Http(timeout=timeout)", "requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: #", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error') raise", "1 self.__debug = True self.__is_public = is_public self.__service_url = url self.__timeout = timeout", "self.__service_url = url self.__timeout = timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}):", "permissions and # limitations under the License. 
# # Copyright 2019 Google LLC.", "timeout=30, is_public=True): self.__log = 1 self.__debug = True self.__is_public = is_public self.__service_url =", "# print('get_test', result.text) return result.text raise Exception(\"Respose Failure for HTTP - {} -", "headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers)", "or representation for any use or purpose.# # import sys def main(argv): pass", "- {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for", "(the \"License\"); # you may not use this file except in compliance with", "# limitations under the License. # # Copyright 2019 Google LLC. This software", "sys def main(argv): pass if __name__ == '__main__': main(sys.argv) import httplib2 import logging", "headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if", "in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer ' + jwt", "# without warranty or representation for any use or purpose.# # import sys", "httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer ' +", "+ jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text)))", "<filename>tests/artifacts/drivers/simple_hello/http_rest_helper.py #!/usr/bin/python # # Copyright 2019 Google LLC # # Licensed under the", "# # Unless required by applicable law or agreed to in writing, software", "main(sys.argv) import httplib2 import logging import json import os import 
urllib import requests", "import os import urllib import requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True):", "for index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer '", "print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP - {}", "express or implied. # See the License for the specific language governing permissions", "import urllib import requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log =", "get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] = 'Bearer ' + jwt #", "len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text raise", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "except in compliance with the License. 
# You may obtain a copy of", "#!/usr/bin/python # # Copyright 2019 Google LLC # # Licensed under the Apache", "by applicable law or agreed to in writing, software # distributed under the", "headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout,", "LLC # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "software is provided as-is, # without warranty or representation for any use or", "result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in", "import logging import json import os import urllib import requests class RestHelper(object): def", "Exception(\"Respose Failure for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection,", "timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]:", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "either express or implied. # See the License for the specific language governing", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "def main(argv): pass if __name__ == '__main__': main(sys.argv) import httplib2 import logging import", "url, timeout=30, is_public=True): self.__log = 1 self.__debug = True self.__is_public = is_public self.__service_url", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "or purpose.# # import sys def main(argv): pass if __name__ == '__main__': main(sys.argv)", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= True self.__is_public = is_public self.__service_url = url self.__timeout = timeout http =", "# Copyright 2019 Google LLC # # Licensed under the Apache License, Version", "file except in compliance with the License. # You may obtain a copy", "os import urllib import requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log", "requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code)", "if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error')", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code", "'Bearer ' + jwt # print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers", "License for the specific language governing permissions and # limitations under the License.", "len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "class RestHelper(object): def __init__(self, url, 
timeout=30, is_public=True): self.__log = 1 self.__debug = True", "httplib2 import logging import json import os import urllib import requests class RestHelper(object):", "self.__debug = True self.__is_public = is_public self.__service_url = url self.__timeout = timeout http", "def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in enumerate(collection): get_params.update(c)", "in [\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose Failure for HTTP -", "the License. # You may obtain a copy of the License at #", "logging import json import os import urllib import requests class RestHelper(object): def __init__(self,", "is provided as-is, # without warranty or representation for any use or purpose.#", "to in writing, software # distributed under the License is distributed on an", "if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "requests class RestHelper(object): def __init__(self, url, timeout=30, is_public=True): self.__log = 1 self.__debug =", "if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result =", "the specific language governing permissions and # limitations under the License. # #", "License. # # Copyright 2019 Google LLC. This software is provided as-is, #", "if __name__ == '__main__': main(sys.argv) import httplib2 import logging import json import os", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "governing permissions and # limitations under the License. # # Copyright 2019 Google", "implied. # See the License for the specific language governing permissions and #", "limitations under the License. # # Copyright 2019 Google LLC. 
This software is", "headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result = requests.get(url_base, params=get_params,", "\"License\"); # you may not use this file except in compliance with the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result = requests.get(self.__service_url,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "required by applicable law or agreed to in writing, software # distributed under", "is_public=True): self.__log = 1 self.__debug = True self.__is_public = is_public self.__service_url = url", "headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text)", "' + jwt # print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result,", "Failure for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None,", "logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text", "print('headers', headers) result = requests.get(self.__service_url, timeout=self.__timeout, headers=headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and", "applicable law or agreed to in writing, software # distributed under the License", "# print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP -", "[\"200\"]: # print('get_test', result.text) return result.text raise Exception(\"Respose Failure for HTTP - {}", "This 
software is provided as-is, # without warranty or representation for any use", "print('headers', headers) result = requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text)))", "__name__ == '__main__': main(sys.argv) import httplib2 import logging import json import os import", "get_params)) if jwt: headers['Authorization'] = 'Bearer ' + jwt # print('headers', headers) result", "or agreed to in writing, software # distributed under the License is distributed", "enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer ' + jwt #", "or implied. # See the License for the specific language governing permissions and", "representation for any use or purpose.# # import sys def main(argv): pass if", "and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "# logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and str(result.status_code) in [\"200\"]: # print('get_test', result.text) return", "timeout http = httplib2.Http(timeout=timeout) def get_text(self, jwt=None, headers={}): print('get_text') if jwt: headers['Authorization'] =", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "for HTTP - {} - {}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={},", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "with the License. 
# You may obtain a copy of the License at", "self.__log = 1 self.__debug = True self.__is_public = is_public self.__service_url = url self.__timeout", "{}\".format(result.status_code, result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c", "result.text)) @staticmethod def call_with_sequence(url_base, collection, jwt=None, headers={}, timeout=30): logging.info('call_with_sequence:start') get_params={} for index,c in", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "index,c in enumerate(collection): get_params.update(c) logging.info('call_with_sequence:base[{}]:params[{}]'.format(url_base, get_params)) if jwt: headers['Authorization'] = 'Bearer ' +", "str(result.status_code) in [\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure", "[\"200\"]: # print('get_test', result.text) return result.text logging.error('HTTP Error') raise Exception(\"Respose Failure for HTTP", "in writing, software # distributed under the License is distributed on an \"AS", "= 1 self.__debug = True self.__is_public = is_public self.__service_url = url self.__timeout =", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "= requests.get(url_base, params=get_params, timeout=timeout, headers = headers) # logging.info('process_file_nlp:results-[{}]-[{}]'.format(result, len(result.text))) if result.status_code and" ]
[ "= response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic", "import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object", "response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type')", "__init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year", "# Copyright: This module is owned by First Street Foundation # Internal Imports", "HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a response Args: response (JSON): A", "module is owned by First Street Foundation # Internal Imports from firststreet.models.api import", "Args: response (JSON): A JSON response received from the API \"\"\" def __init__(self,", "\"\"\"Creates a Historic Summary object given a response Args: response (JSON): A JSON", "is owned by First Street Foundation # Internal Imports from firststreet.models.api import Api", "Street Foundation # Internal Imports from firststreet.models.api import Api from firststreet.models.geometry import Geometry", "import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object given a response Args:", "response received from the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId'))", "This module is owned by First Street Foundation # Internal Imports from firststreet.models.api", "JSON response received from the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid =", "Summary object given a response Args: response (JSON): A JSON response received from", "given a response Args: response (JSON): A JSON response received from the API", "JSON response 
received from the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId =", "= response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given", "self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class", "response received from the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid = str(response.get('fsid'))", "Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a response Args: response", "the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid = str(response.get('fsid')) self.historic = response.get('historic')", "class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a response Args: response (JSON):", "firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object given a response", "response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates", "firststreet.models.api import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event", "response Args: response (JSON): A JSON response received from the API \"\"\" def", "by First Street Foundation # Internal Imports from firststreet.models.api import Api from firststreet.models.geometry", "A JSON response received from the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid", "Copyright: This module is owned by First Street Foundation # Internal Imports from", "<NAME> <<EMAIL>> # Copyright: This module is owned by First Street Foundation #", "Event object given a response Args: response 
(JSON): A JSON response received from", "a Historic Summary object given a response Args: response (JSON): A JSON response", "self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod", "= response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type =", "str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod')", "Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object given a response Args: response", "self.type = response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a", "received from the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name", "\"\"\"Creates a Historic Event object given a response Args: response (JSON): A JSON", "super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year = response.get('year')", "= Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a response Args:", "the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name')", "class HistoricEvent(Api): \"\"\"Creates a Historic Event object given a response Args: response (JSON):", "Historic Summary object given a response Args: response (JSON): A JSON response received", "self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry", "Historic Event object 
given a response Args: response (JSON): A JSON response received", "= response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry =", "self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object", "response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a", "# Internal Imports from firststreet.models.api import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api):", "= str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod =", "owned by First Street Foundation # Internal Imports from firststreet.models.api import Api from", "Author: <NAME> <<EMAIL>> # Copyright: This module is owned by First Street Foundation", "a Historic Event object given a response Args: response (JSON): A JSON response", "Internal Imports from firststreet.models.api import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates", "API \"\"\" def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month", "response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry'))", "\"\"\" def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month =", "(JSON): A JSON response received from the API \"\"\" def __init__(self, response): super().__init__(response)", "from the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId = 
str(response.get('eventId')) self.name =", "from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object given a", "response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary", "from firststreet.models.api import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic", "# Author: <NAME> <<EMAIL>> # Copyright: This module is owned by First Street", "A JSON response received from the API \"\"\" def __init__(self, response): super().__init__(response) self.eventId", "self.name = response.get('name') self.month = response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type", "response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties')", "<<EMAIL>> # Copyright: This module is owned by First Street Foundation # Internal", "a response Args: response (JSON): A JSON response received from the API \"\"\"", "HistoricEvent(Api): \"\"\"Creates a Historic Event object given a response Args: response (JSON): A", "from the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid = str(response.get('fsid')) self.historic =", "object given a response Args: response (JSON): A JSON response received from the", "self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api): \"\"\"Creates a Historic Summary object given a response", "response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month') self.year =", "self.month = response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties", 
"received from the API \"\"\" def __init__(self, response): super().__init__(response) self.fsid = str(response.get('fsid')) self.historic", "First Street Foundation # Internal Imports from firststreet.models.api import Api from firststreet.models.geometry import", "= response.get('returnPeriod') self.type = response.get('type') self.properties = response.get('properties') self.geometry = Geometry(response.get('geometry')) class HistoricSummary(Api):", "def __init__(self, response): super().__init__(response) self.eventId = str(response.get('eventId')) self.name = response.get('name') self.month = response.get('month')", "Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a Historic Event object given", "Imports from firststreet.models.api import Api from firststreet.models.geometry import Geometry class HistoricEvent(Api): \"\"\"Creates a", "response (JSON): A JSON response received from the API \"\"\" def __init__(self, response):", "= response.get('month') self.year = response.get('year') self.returnPeriod = response.get('returnPeriod') self.type = response.get('type') self.properties =", "Foundation # Internal Imports from firststreet.models.api import Api from firststreet.models.geometry import Geometry class" ]
[ "to convert between phi and RD n_rounds(int): Number of rounds for the tournament", "opponents played opponent_phis (list(float)): The phi ratings of the opponents played scores (list(inte)):", "tensor_output (torch.Tensor): M tensor output of the discriminator (M samples,) probability of being", "each sample Returns: acc (float): the probability accuracy of the output vs. the", "rating based on game outcomes delta = v * np.sum(g * (scores -", "np.sum(g * (scores - E)) new_rating = norm_val * new_mu + starting_rating new_rd", "convert between phi and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float,", "where 1 row is for 1 refiner (respectively for discriminator). ''' n_refiners =", "Pandas DataFrame for metadata-ratings where 1 row is for 1 refiner (respectively for", "[]} # Perform matches between each pair (R,D) for id_R, R in zip(refiner_ids,", "percent accuracy of the output, using the labels. Note that the sigmoid is", "def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): '''", "the winner Returns: A tuple a of Pandas DataFrames... A Pandas DataFrame for", "ratings = {} for id in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd,", "for 1 refiner (respectively for discriminator). ''' n_refiners = len(refiners) ids = np.arange(n_refiners", "played scores (list(inte)): The scores of the games played, 1 indicating a win,", "using the Tournament Skill Rating Evaluation. Parameters: refiners (list(torch.nn)): list of refiners discriminators", "to determine the winner of the match discriminator_win_thresh: The accuracy of the discriminator", "/ (1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure", "part of the Discriminator Network. Parameters: tensor_output (torch.Tensor): M tensor output of the", "just running data through refiner and discrim. 
like why not just do that", "labels for each sample Returns: acc (float): the probability accuracy of the output", "new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get refiner and discriminator with", "in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators): # RODD - ?...why", "The former mu rating old_phi (float): The former phi rating opponent_mus (list(float)): The", "n_rounds(int): Number of rounds for the tournament matches_per_pairing(int): The number of matches per", "Returns: A tuple a of Pandas DataFrames... A Pandas DataFrame for metadata-ratings where", "with more data? for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real", "np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get", "of the opponents played scores (list(inte)): The scores of the games played, 1", "is E = 1.0 / (1 + np.exp(-1 * g * (old_mu -", "tensor_labels): ''' Calculate the percent accuracy of the output, using the labels. 
Note", "device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined =", "discriminator accuracy on real and refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real,", "scores for the refiners and discriminators new_ratings = ratings.copy() for id in ids:", "new_phi**2 * np.sum(g * (scores - E)) new_rating = norm_val * new_mu +", "rating that players were initialized to norm_val (float): The normalization value used to", "players were initialized to norm_val (float): The normalization value used to convert between", "The rating that players were initialized to starting_RD (float): The RD that players", "id_D, D in zip(discriminator_ids, discriminators): # RODD - ?...why do we need multiple", "np import pandas as pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500,", "mu ratings of the opponents played opponent_phis (list(float)): The phi ratings of the", "= {} for id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores':", "data? 
for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds],", "# A score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1)", "discriminator (M samples,) probability of being class '1' tensor_labels (torch.Tensor): M tensor true", "to training starting_rating (float): The rating that players were initialized to starting_RD (float):", "= 1.0 / (1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5 #", "opponents played scores (list(inte)): The scores of the games played, 1 indicating a", "Glicko2 calculation Parameters: old_mu (float): The former mu rating old_phi (float): The former", "v = np.sum(g**2 * E * (1 - E)) ** -1 # Estimated", "of refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset train_config (dict):", "device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a", "explain/figure out what g is E = 1.0 / (1 + np.exp(-1 *", "new_rating, new_rd) (float, float, float, float): The updated Glicko values for the player", "matches between each pair (R,D) for id_R, R in zip(refiner_ids, refiners): for id_D,", "# RODD - ?...why do we need multiple matches? why not just change", "The normalization value used to convert between phi and RD n_rounds(int): Number of", "of the discriminator needed for the discriminator to be declared the winner Returns:", "just do that once but with more data? 
for match in range(matches_per_pairing): real_inds", "tensor output of the discriminator (M samples,) probability of being class '1' tensor_labels", "torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores,", "1.0 / (1 + np.exp(-1 * g * (old_mu - opponent_mus))) # Probability", "new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get refiner and", "refiner (respectively for discriminator). ''' n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators))", "?...why do we need multiple matches? why not just change samples to samples_per_match*matches_per_pairing", "tournament to rank refiners + discriminators for simgan ''' import numpy as np", "not just do that once but with more data? for match in range(matches_per_pairing):", "refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary", "change samples to samples_per_match*matches_per_pairing # ...like it's just running data through refiner and", "norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get refiner", "the winner of the match discriminator_win_thresh: The accuracy of the discriminator needed for", "in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds", "more data? 
for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real =", "the player ''' g = 1.0 / (1 + 3 * opponent_phis**2 /", "device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): # instantiate", "(list(inte)): The scores of the games played, 1 indicating a win, 0 indicating", "refiners and discriminators new_ratings = ratings.copy() for id in ids: results = match_results[id]", "normalization value used to convert between phi and RD n_rounds(int): Number of rounds", "matches? why not just change samples to samples_per_match*matches_per_pairing # ...like it's just running", "float, float): The updated Glicko values for the player ''' g = 1.0", "starting_rating (float): The rating that players were initialized to starting_RD (float): The RD", "per refiner/discriminator pairing to determine the overall winner samples_per_match(int): The number of samples", "players were initialized to starting_RD (float): The RD that players were initialized to", "labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average accuracy", "the percent accuracy of the output, using the labels. Note that the sigmoid", "of samples per match to determine the winner of the match discriminator_win_thresh: The", "ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings =", "matches per refiner/discriminator pairing to determine the overall winner samples_per_match(int): The number of", "as part of the Discriminator Network. 
Parameters: tensor_output (torch.Tensor): M tensor output of", "former phi rating opponent_mus (list(float)): The mu ratings of the opponents played opponent_phis", "* g * (old_mu - opponent_mus))) # Probability of player winning each match", "information related to training starting_rating (float): The rating that players were initialized to", "the average accuracy of the discriminator avg_acc = (acc_real + acc_refined) / 2.0", "= torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu,", "discriminator avg_acc = (acc_real + acc_refined) / 2.0 # Add this match's results", "starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and return", "value used to convert between phi and RD n_rounds(int): Number of rounds for", "pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5,", "just change samples to samples_per_match*matches_per_pairing # ...like it's just running data through refiner", "output of the discriminator (M samples,) probability of being class '1' tensor_labels (torch.Tensor):", "discriminators new_ratings = ratings.copy() for id in ids: results = match_results[id] glicko_calculations =", "for the discriminator # A score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0)", "per match to determine the winner of the match discriminator_win_thresh: The accuracy of", "RD that players were initialized to norm_val (float): The normalization value used to", "discriminator to be declared the winner Returns: A tuple a of Pandas DataFrames...", "and discriminators using the Tournament Skill Rating Evaluation. 
Parameters: refiners (list(torch.nn)): list of", "improvement in rating new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu", "np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for", "ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the", "true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach())", "len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO", "winner of the match discriminator_win_thresh: The accuracy of the discriminator needed for the", "discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids]", "Number of rounds for the tournament matches_per_pairing(int): The number of matches per refiner/discriminator", "the output, using the labels. Note that the sigmoid is already calculated as", "** 0.5 new_mu = old_mu + new_phi**2 * np.sum(g * (scores - E))", "acc (float): the probability accuracy of the output vs. the true labels '''", "with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return", "refiners and discriminators using the Tournament Skill Rating Evaluation. Parameters: refiners (list(torch.nn)): list", "for metadata-ratings where 1 row is for 1 refiner (respectively for discriminator). 
'''", "winning each match v = np.sum(g**2 * E * (1 - E)) **", "of the games played, 1 indicating a win, 0 indicating a loss starting_rating", "The former phi rating opponent_mus (list(float)): The mu ratings of the opponents played", "real and refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined =", "# TODO: explain/figure out what g is E = 1.0 / (1 +", "the match discriminator_win_thresh: The accuracy of the discriminator needed for the discriminator to", "discriminators using the Tournament Skill Rating Evaluation. Parameters: refiners (list(torch.nn)): list of refiners", "samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get discriminator", "and discriminators new_ratings = ratings.copy() for id in ids: results = match_results[id] glicko_calculations", "# ...like it's just running data through refiner and discrim. like why not", "played opponent_phis (list(float)): The phi ratings of the opponents played scores (list(inte)): The", "list of refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset train_config", "values for the player ''' g = 1.0 / (1 + 3 *", "multiple matches? 
why not just change samples to samples_per_match*matches_per_pairing # ...like it's just", "validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match results match_results = {} for", "match discriminator_win_thresh: The accuracy of the discriminator needed for the discriminator to be", "on real and refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined", "Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >=", "to starting_RD (float): The RD that players were initialized to norm_val (float): The", "of the Discriminator Network. Parameters: tensor_output (torch.Tensor): M tensor output of the discriminator", "output vs. the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred ==", "(list(float)): The phi ratings of the opponents played scores (list(inte)): The scores of", "match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners and discriminators", "is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the", "Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData):", "# Find the average accuracy of the discriminator avg_acc = (acc_real + acc_refined)", "using the labels. Note that the sigmoid is already calculated as part of", "calculation Parameters: old_mu (float): The former mu rating old_phi (float): The former phi", "Source? 
https://arxiv.org/abs/1808.04888 ????? Find the best refiner and discriminator from the list of", "winner samples_per_match(int): The number of samples per match to determine the winner of", "indicating a loss starting_rating (float): The rating that players were initialized to norm_val", "D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) #", "* new_mu + starting_rating new_rd = norm_val * new_phi return new_mu, new_phi, new_rating,", "to norm_val (float): The normalization value used to convert between phi and RD", "match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy greater than or", "http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and return the new glicko values for the", "''' g = 1.0 / (1 + 3 * opponent_phis**2 / np.pi**2) **", "= {} for id in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu':", "???? Calculate and return the new glicko values for the player using Glicko2", "new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get refiner and discriminator with best", "ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val)", "discriminators): # RODD - ?...why do we need multiple matches? why not just", "is considered a win for the discriminator # A score of 1 is", "discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary holding", "row is for 1 refiner (respectively for discriminator). 
''' n_refiners = len(refiners) ids", "new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get refiner and discriminator", "Calculate the percent accuracy of the output, using the labels. Note that the", "...like it's just running data through refiner and discrim. like why not just", "out what g is E = 1.0 / (1 + np.exp(-1 * g", "= v * np.sum(g * (scores - E)) # Estimated improvement in rating", "(float): The RD that players were initialized to norm_val (float): The normalization value", "R(simulated) # Get discriminator accuracy on real and refined data d_pred_real = D(real)", "acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ????", "being class '1' tensor_labels (torch.Tensor): M tensor true labels for each sample Returns:", "-1 # Estimated variance of the player's rating based on game outcomes delta", "opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate", "labels. Note that the sigmoid is already calculated as part of the Discriminator", "= new_ratings # Get refiner and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T", "samples_per_match*matches_per_pairing # ...like it's just running data through refiner and discrim. like why", "calculated as part of the Discriminator Network. 
Parameters: tensor_output (torch.Tensor): M tensor output", "rounds for the tournament matches_per_pairing(int): The number of matches per refiner/discriminator pairing to", "The mu ratings of the opponents played opponent_phis (list(float)): The phi ratings of", "Parameters: tensor_output (torch.Tensor): M tensor output of the discriminator (M samples,) probability of", "accuracy greater than or equal to this threshold is considered a win for", "in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating,", "= torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float,", "+ len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for id", "all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match", "average accuracy of the discriminator avg_acc = (acc_real + acc_refined) / 2.0 #", "new_mu + starting_rating new_rd = norm_val * new_phi return new_mu, new_phi, new_rating, new_rd", "accuracy of the output, using the labels. Note that the sigmoid is already", "determine the overall winner samples_per_match(int): The number of samples per match to determine", "once but with more data? 
for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match,", "equal to this threshold is considered a win for the discriminator # A", "games played, 1 indicating a win, 0 indicating a loss starting_rating (float): The", "to convert between phi and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float,", "np.sum(g * (scores - E)) # Estimated improvement in rating new_phi = 1", "D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of the discriminator", "and discriminator from the list of refiners and discriminators using the Tournament Skill", "The RD that players were initialized to norm_val (float): The normalization value used", "calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ????", "{'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device)", "= ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy", "overall winner samples_per_match(int): The number of samples per match to determine the winner", "samples to samples_per_match*matches_per_pairing # ...like it's just running data through refiner and discrim.", "a loss starting_rating (float): The rating that players were initialized to norm_val (float):", "of rounds for the tournament matches_per_pairing(int): The number of matches per refiner/discriminator pairing", "M tensor true labels for each sample Returns: acc (float): the probability accuracy", "pandas as pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178,", "validation_data (simganData): SimGAN dataset train_config (dict): 
dictionary holding information related to training starting_rating", "discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of the output, using", "return the new glicko values for the player using Glicko2 calculation Parameters: old_mu", "Skill Rating Evaluation. Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of", "range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds =", "related to training starting_rating (float): The rating that players were initialized to starting_RD", "what g is E = 1.0 / (1 + np.exp(-1 * g *", "refiner and discrim. like why not just do that once but with more", "/ np.pi**2) ** 0.5 # TODO: explain/figure out what g is E =", "in zip(discriminator_ids, discriminators): # RODD - ?...why do we need multiple matches? why", "old_phi (float): The former phi rating opponent_mus (list(float)): The mu ratings of the", "Calculate and return the new glicko values for the player using Glicko2 calculation", "device=device) refined = R(simulated) # Get discriminator accuracy on real and refined data", "of refiners and discriminators using the Tournament Skill Rating Evaluation. Parameters: refiners (list(torch.nn)):", "player winning each match v = np.sum(g**2 * E * (1 - E))", "float): The updated Glicko values for the player ''' g = 1.0 /", "(float): The rating that players were initialized to norm_val (float): The normalization value", "''' Calculate the percent accuracy of the output, using the labels. Note that", "+ discriminators for simgan ''' import numpy as np import pandas as pd", "the overall winner samples_per_match(int): The number of samples per match to determine the", "TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? 
Calculate and return the new glicko", "def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of the output, using the", "best refiner and discriminator from the list of refiners and discriminators using the", "np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False)", "rating new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu", "accuracy of the discriminator avg_acc = (acc_real + acc_refined) / 2.0 # Add", "scores of the games played, 1 indicating a win, 0 indicating a loss", "acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of the discriminator avg_acc", "sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated)", "ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for id in ids: ratings[id] =", "= 1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu + new_phi**2", "why not just do that once but with more data? for match in", "simgan ''' import numpy as np import pandas as pd import torch def", "than or equal to this threshold is considered a win for the discriminator", "(torch.Tensor): M tensor true labels for each sample Returns: acc (float): the probability", "to samples_per_match*matches_per_pairing # ...like it's just running data through refiner and discrim. like", "score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update", "????? 
Find the best refiner and discriminator from the list of refiners and", "for id in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi':", "for id_D, D in zip(discriminator_ids, discriminators): # RODD - ?...why do we need", "ratings of the opponents played scores (list(inte)): The scores of the games played,", "glicko_calculations ratings = new_ratings # Get refiner and discriminator with best ratings ratings_pd", "The number of samples per match to determine the winner of the match", "for id in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']),", "TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? Find the best refiner and discriminator", "match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform matches between each", "ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def", "len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings", "The rating that players were initialized to norm_val (float): The normalization value used", "norm_val (float): The normalization value used to convert between phi and RD Returns:", "= torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated", "= len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:]", "= torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in", "# instantiate match results 
match_results = {} for id in ids: match_results[id] =", "and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The updated", "holding information related to training starting_rating (float): The rating that players were initialized", "(float): The normalization value used to convert between phi and RD Returns: (new_mu,", "tensor_labels (torch.Tensor): M tensor true labels for each sample Returns: acc (float): the", "ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of", "(torch.Tensor): M tensor output of the discriminator (M samples,) probability of being class", "old_mu + new_phi**2 * np.sum(g * (scores - E)) new_rating = norm_val *", "training starting_rating (float): The rating that players were initialized to starting_RD (float): The", "refined = R(simulated) # Get discriminator accuracy on real and refined data d_pred_real", "...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? 
Calculate and return the new glicko values", "avg_acc >= discriminator_win_thresh: # An accuracy greater than or equal to this threshold", "considered a win for the discriminator # A score of 1 is a", "calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations", "tournament matches_per_pairing(int): The number of matches per refiner/discriminator pairing to determine the overall", "samples_per_match(int): The number of samples per match to determine the winner of the", "** -1 # Estimated variance of the player's rating based on game outcomes", "(float): The former mu rating old_phi (float): The former phi rating opponent_mus (list(float)):", "phi rating opponent_mus (list(float)): The mu ratings of the opponents played opponent_phis (list(float)):", "it's just running data through refiner and discrim. like why not just do", "but with more data? for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False)", "used to convert between phi and RD n_rounds(int): Number of rounds for the", "def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ???? 
http://www.glicko.net/glicko/glicko2.pdf", "Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The updated Glicko values", "the opponents played scores (list(inte)): The scores of the games played, 1 indicating", "match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy greater than or equal", "and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings =", "to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy", "delta = v * np.sum(g * (scores - E)) # Estimated improvement in", "dictionary holding information related to training starting_rating (float): The rating that players were", "zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators): # RODD - ?...why do", "of the discriminator (M samples,) probability of being class '1' tensor_labels (torch.Tensor): M", "tuple a of Pandas DataFrames... A Pandas DataFrame for metadata-ratings where 1 row", "match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device)", "mu rating old_phi (float): The former phi rating opponent_mus (list(float)): The mu ratings", "that players were initialized to starting_RD (float): The RD that players were initialized", "TODO: explain/figure out what g is E = 1.0 / (1 + np.exp(-1", "(1 + np.exp(-1 * g * (old_mu - opponent_mus))) # Probability of player", "like why not just do that once but with more data? 
for match", "replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated =", "= np.sum(g**2 * E * (1 - E)) ** -1 # Estimated variance", "the discriminator needed for the discriminator to be declared the winner Returns: A", "RODD - ?...why do we need multiple matches? why not just change samples", "between phi and RD n_rounds(int): Number of rounds for the tournament matches_per_pairing(int): The", "(float): the probability accuracy of the output vs. the true labels ''' y_pred", "match to determine the winner of the match discriminator_win_thresh: The accuracy of the", "An accuracy greater than or equal to this threshold is considered a win", "/ 2.0 # Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi'])", "value used to convert between phi and RD Returns: (new_mu, new_phi, new_rating, new_rd)", "The updated Glicko values for the player ''' g = 1.0 / (1", "a of Pandas DataFrames... A Pandas DataFrame for metadata-ratings where 1 row is", "norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source? 
https://arxiv.org/abs/1808.04888 ?????", "a win, 0 indicating a loss starting_rating (float): The rating that players were", "g * (old_mu - opponent_mus))) # Probability of player winning each match v", "Estimated variance of the player's rating based on game outcomes delta = v", "''' tournament to rank refiners + discriminators for simgan ''' import numpy as", "calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of the discriminator avg_acc = (acc_real", "/ (1 + np.exp(-1 * g * (old_mu - opponent_mus))) # Probability of", "* np.sum(g * (scores - E)) new_rating = norm_val * new_mu + starting_rating", "indicating a win, 0 indicating a loss starting_rating (float): The rating that players", "(scores - E)) new_rating = norm_val * new_mu + starting_rating new_rd = norm_val", "if avg_acc >= discriminator_win_thresh: # An accuracy greater than or equal to this", "= ids[n_refiners:] ratings = {} for id in ids: ratings[id] = {'r': starting_rating,", "samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated", "''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and return the new", "ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real =", "scores (list(inte)): The scores of the games played, 1 indicating a win, 0", "do we need multiple matches? 
why not just change samples to samples_per_match*matches_per_pairing #", "of the opponents played opponent_phis (list(float)): The phi ratings of the opponents played", "* (old_mu - opponent_mus))) # Probability of player winning each match v =", "ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings", "Perform matches between each pair (R,D) for id_R, R in zip(refiner_ids, refiners): for", "refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of the output,", "= {'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform matches between each pair", "(1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure out", "1 row is for 1 refiner (respectively for discriminator). ''' n_refiners = len(refiners)", "match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy greater", "(float): The rating that players were initialized to starting_RD (float): The RD that", "of matches per refiner/discriminator pairing to determine the overall winner samples_per_match(int): The number", "matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? 
Find the", "E = 1.0 / (1 + np.exp(-1 * g * (old_mu - opponent_mus)))", "Find the best refiner and discriminator from the list of refiners and discriminators", "M tensor output of the discriminator (M samples,) probability of being class '1'", "win, 0 indicating a loss starting_rating (float): The rating that players were initialized", "rating that players were initialized to starting_RD (float): The RD that players were", "= calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of the discriminator avg_acc =", "discriminator from the list of refiners and discriminators using the Tournament Skill Rating", "the discriminator # A score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else:", "(1 - E)) ** -1 # Estimated variance of the player's rating based", "pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels):", "Pandas DataFrames... 
A Pandas DataFrame for metadata-ratings where 1 row is for 1", "refiner/discriminator pairing to determine the overall winner samples_per_match(int): The number of samples per", "norm_val (float): The normalization value used to convert between phi and RD n_rounds(int):", "Update scores for the refiners and discriminators new_ratings = ratings.copy() for id in", "class '1' tensor_labels (torch.Tensor): M tensor true labels for each sample Returns: acc", "'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real", "all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match results match_results =", "refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN", "(simganData): SimGAN dataset train_config (dict): dictionary holding information related to training starting_rating (float):", "number of samples per match to determine the winner of the match discriminator_win_thresh:", "in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform matches", "= old_mu + new_phi**2 * np.sum(g * (scores - E)) new_rating = norm_val", "variance of the player's rating based on game outcomes delta = v *", "torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6):", "were initialized to starting_RD (float): The RD that players were initialized to norm_val", "get a Source? https://arxiv.org/abs/1808.04888 ????? 
Find the best refiner and discriminator from the", "1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for", "instantiate match results match_results = {} for id in ids: match_results[id] = {'opponent_mus':", "refiners): for id_D, D in zip(discriminator_ids, discriminators): # RODD - ?...why do we", "discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary holding information related to training", "the best refiner and discriminator from the list of refiners and discriminators using", "samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? Find the best", "and RD n_rounds(int): Number of rounds for the tournament matches_per_pairing(int): The number of", "the output vs. the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred", "= 1.0 / (1 + np.exp(-1 * g * (old_mu - opponent_mus))) #", "between phi and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float, float):", "sigmoid is already calculated as part of the Discriminator Network. Parameters: tensor_output (torch.Tensor):", "A Pandas DataFrame for metadata-ratings where 1 row is for 1 refiner (respectively", "is for 1 refiner (respectively for discriminator). 
''' n_refiners = len(refiners) ids =", "player ''' g = 1.0 / (1 + 3 * opponent_phis**2 / np.pi**2)", "dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined", "normalization value used to convert between phi and RD Returns: (new_mu, new_phi, new_rating,", "for the player ''' g = 1.0 / (1 + 3 * opponent_phis**2", "The number of matches per refiner/discriminator pairing to determine the overall winner samples_per_match(int):", "0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device)", "for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float,", "https://en.wikipedia.org/wiki/Glicko_rating_system ???? 
Calculate and return the new glicko values for the player using", "or equal to this threshold is considered a win for the discriminator #", "- E)) ** -1 # Estimated variance of the player's rating based on", "win for the discriminator # A score of 1 is a win match_results[id_D]['scores'].append(1)", "match results match_results = {} for id in ids: match_results[id] = {'opponent_mus': [],", "new_ratings # Get refiner and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings", "acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find", "not just change samples to samples_per_match*matches_per_pairing # ...like it's just running data through", "= torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis,", "# Get refiner and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings =", "refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined", "new_phi, new_rating, new_rd) (float, float, float, float): The updated Glicko values for the", "3 * opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure out what g", "get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can", "* np.sum(g * (scores - E)) # Estimated improvement in rating new_phi =", "Glicko values for the player ''' g = 1.0 / (1 + 3", "for id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []} #", "1 refiner (respectively for discriminator). ''' n_refiners = len(refiners) ids = np.arange(n_refiners +", "the list of refiners and discriminators using the Tournament Skill Rating Evaluation. 
Parameters:", "player's rating based on game outcomes delta = v * np.sum(g * (scores", "(old_mu - opponent_mus))) # Probability of player winning each match v = np.sum(g**2", "new_mu = old_mu + new_phi**2 * np.sum(g * (scores - E)) new_rating =", "1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu + new_phi**2 *", "is already calculated as part of the Discriminator Network. Parameters: tensor_output (torch.Tensor): M", "# Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc", "real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds],", "rank refiners + discriminators for simgan ''' import numpy as np import pandas", "= D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of the", "norm_val * new_mu + starting_rating new_rd = norm_val * new_phi return new_mu, new_phi,", "labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd", "of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores", "replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get discriminator accuracy", "E)) ** -1 # Estimated variance of the player's rating based on game", "to rank refiners + discriminators for simgan ''' import numpy as np import", "= (acc_real + acc_refined) / 2.0 # Add this match's results to match_results", 
"new_rating = norm_val * new_mu + starting_rating new_rd = norm_val * new_phi return", "real_inds = np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)),", "dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw", "id in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']),", "(1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu + new_phi**2 * np.sum(g *", "The accuracy of the discriminator needed for the discriminator to be declared the", "probability accuracy of the output vs. the true labels ''' y_pred = torch.round(tensor_output)#.detatch())", "for discriminator). 
''' n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids =", "Note that the sigmoid is already calculated as part of the Discriminator Network.", "as pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3,", "R in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators): # RODD -", "- E)) new_rating = norm_val * new_mu + starting_rating new_rd = norm_val *", "used to convert between phi and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float,", "* opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure out what g is", "# Estimated variance of the player's rating based on game outcomes delta =", "Find the average accuracy of the discriminator avg_acc = (acc_real + acc_refined) /", "{} for id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []}", "for simgan ''' import numpy as np import pandas as pd import torch", "win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners and", "* (1 - E)) ** -1 # Estimated variance of the player's rating", "A tuple a of Pandas DataFrames... 
A Pandas DataFrame for metadata-ratings where 1", "2.0 # Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if", "match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy greater than", "the refiners and discriminators new_ratings = ratings.copy() for id in ids: results =", "ratings of the opponents played opponent_phis (list(float)): The phi ratings of the opponents", "to determine the overall winner samples_per_match(int): The number of samples per match to", "torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device)", "(float, float, float, float): The updated Glicko values for the player ''' g", "on game outcomes delta = v * np.sum(g * (scores - E)) #", "refiner and discriminator from the list of refiners and discriminators using the Tournament", "refiner and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings", "discriminator). 
''' n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners]", "+ acc_refined) / 2.0 # Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu'])", "match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners and discriminators new_ratings = ratings.copy()", "(list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary holding information", "new_ratings = ratings.copy() for id in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'],", "list of discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary holding information related", "= validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match results match_results = {}", "(M samples,) probability of being class '1' tensor_labels (torch.Tensor): M tensor true labels", "loss starting_rating (float): The rating that players were initialized to norm_val (float): The", "discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we", "labels_refined) # Find the average accuracy of the discriminator avg_acc = (acc_real +", "(float): The normalization value used to convert between phi and RD n_rounds(int): Number", "???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? 
Calculate and return the new glicko values for", "np.exp(-1 * g * (old_mu - opponent_mus))) # Probability of player winning each", "match_results[id_R]['scores'].append(1) # Update scores for the refiners and discriminators new_ratings = ratings.copy() for", "and return the new glicko values for the player using Glicko2 calculation Parameters:", "convert between phi and RD n_rounds(int): Number of rounds for the tournament matches_per_pairing(int):", "the sigmoid is already calculated as part of the Discriminator Network. Parameters: tensor_output", "torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get discriminator accuracy on real and", "np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings #", "RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The updated Glicko", "new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu +", "number of matches per refiner/discriminator pairing to determine the overall winner samples_per_match(int): The", "+ new_phi**2 * np.sum(g * (scores - E)) new_rating = norm_val * new_mu", "= np.random.choice(np.arange(len(all_real)), samples_per_match, replace=False) real = torch.tensor(all_real[real_inds], dtype=torch.float, device=device) sim_inds = np.random.choice(np.arange(len(all_simulated)), samples_per_match,", "0 indicating a loss starting_rating (float): The rating that players were initialized to", "/ len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): '''", "+ 3 * opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure out what", "for the tournament matches_per_pairing(int): The number of matches per refiner/discriminator pairing to determine", "for the refiners and discriminators new_ratings = ratings.copy() for 
id in ids: results", "id_R, R in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators): # RODD", "the probability accuracy of the output vs. the true labels ''' y_pred =", "Evaluation. Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of discriminators validation_data", "outcomes delta = v * np.sum(g * (scores - E)) # Estimated improvement", "1.0 / (1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5 # TODO:", "old_mu (float): The former mu rating old_phi (float): The former phi rating opponent_mus", "# An accuracy greater than or equal to this threshold is considered a", "1 indicating a win, 0 indicating a loss starting_rating (float): The rating that", "the discriminator avg_acc = (acc_real + acc_refined) / 2.0 # Add this match's", "for the discriminator to be declared the winner Returns: A tuple a of", "(new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The updated Glicko values for", "and refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined)", "1/v) ** 0.5 new_mu = old_mu + new_phi**2 * np.sum(g * (scores -", "of the discriminator avg_acc = (acc_real + acc_refined) / 2.0 # Add this", ">= discriminator_win_thresh: # An accuracy greater than or equal to this threshold is", "output, using the labels. 
Note that the sigmoid is already calculated as part", "using Glicko2 calculation Parameters: old_mu (float): The former mu rating old_phi (float): The", "+ np.exp(-1 * g * (old_mu - opponent_mus))) # Probability of player winning", "for each sample Returns: acc (float): the probability accuracy of the output vs.", "'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined =", "discriminator # A score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0)", "rating old_phi (float): The former phi rating opponent_mus (list(float)): The mu ratings of", "winner Returns: A tuple a of Pandas DataFrames... A Pandas DataFrame for metadata-ratings", "= pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output,", "RD n_rounds(int): Number of rounds for the tournament matches_per_pairing(int): The number of matches", "(list(float)): The mu ratings of the opponents played opponent_phis (list(float)): The phi ratings", "float, float, float): The updated Glicko values for the player ''' g =", "np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings", "best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings,", "* (scores - E)) # Estimated improvement in rating new_phi = 1 /", "numpy as np import pandas as pd import torch def get_graph_ratings(refiners, discriminators, validation_data,", "acc_refined) / 2.0 # Add this match's results to match_results 
match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi'])", "the new glicko values for the player using Glicko2 calculation Parameters: old_mu (float):", "in rating new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5 new_mu =", "samples per match to determine the winner of the match discriminator_win_thresh: The accuracy", "the games played, 1 indicating a win, 0 indicating a loss starting_rating (float):", "the Discriminator Network. Parameters: tensor_output (torch.Tensor): M tensor output of the discriminator (M", "do that once but with more data? for match in range(matches_per_pairing): real_inds =", "validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match results match_results", "= ratings.copy() for id in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'],", "running data through refiner and discrim. like why not just do that once", "n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? Find", "of the output vs. 
the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc =", "a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners", "for the player using Glicko2 calculation Parameters: old_mu (float): The former mu rating", "# Estimated improvement in rating new_phi = 1 / (1/old_phi**2 + 1/v) **", "were initialized to norm_val (float): The normalization value used to convert between phi", "# Get discriminator accuracy on real and refined data d_pred_real = D(real) acc_real", "discriminator_ids = ids[n_refiners:] ratings = {} for id in ids: ratings[id] = {'r':", "pair (R,D) for id_R, R in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids,", "np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings =", "(acc_real + acc_refined) / 2.0 # Add this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu'])", "E)) # Estimated improvement in rating new_phi = 1 / (1/old_phi**2 + 1/v)", "the tournament matches_per_pairing(int): The number of matches per refiner/discriminator pairing to determine the", "calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of the output, using the labels.", "(list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of discriminators validation_data (simganData): SimGAN dataset", "match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: #", 
"accuracy of the output vs. the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc", "that players were initialized to norm_val (float): The normalization value used to convert", "Network. Parameters: tensor_output (torch.Tensor): M tensor output of the discriminator (M samples,) probability", "ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform matches between", "match v = np.sum(g**2 * E * (1 - E)) ** -1 #", "player using Glicko2 calculation Parameters: old_mu (float): The former mu rating old_phi (float):", "avg_acc = (acc_real + acc_refined) / 2.0 # Add this match's results to", "initialized to norm_val (float): The normalization value used to convert between phi and", "= D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined)", "glicko values for the player using Glicko2 calculation Parameters: old_mu (float): The former", "be declared the winner Returns: A tuple a of Pandas DataFrames... A Pandas", "???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and return the new glicko values for the player", "- ?...why do we need multiple matches? 
why not just change samples to", "(dict): dictionary holding information related to training starting_rating (float): The rating that players", "np.pi**2) ** 0.5 # TODO: explain/figure out what g is E = 1.0", "each pair (R,D) for id_R, R in zip(refiner_ids, refiners): for id_D, D in", "= np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {}", "based on game outcomes delta = v * np.sum(g * (scores - E))", "starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined", "(scores - E)) # Estimated improvement in rating new_phi = 1 / (1/old_phi**2", "threshold is considered a win for the discriminator # A score of 1", "accuracy of the discriminator needed for the discriminator to be declared the winner", "= {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float,", "this match's results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh:", "need multiple matches? 
why not just change samples to samples_per_match*matches_per_pairing # ...like it's", "played, 1 indicating a win, 0 indicating a loss starting_rating (float): The rating", "* E * (1 - E)) ** -1 # Estimated variance of the", "data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined =", "starting_RD (float): The RD that players were initialized to norm_val (float): The normalization", "simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get discriminator accuracy on", "matches_per_pairing(int): The number of matches per refiner/discriminator pairing to determine the overall winner", "metadata-ratings where 1 row is for 1 refiner (respectively for discriminator). ''' n_refiners", "(respectively for discriminator). ''' n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids", "g is E = 1.0 / (1 + np.exp(-1 * g * (old_mu", "= R(simulated) # Get discriminator accuracy on real and refined data d_pred_real =", "of Pandas DataFrames... A Pandas DataFrame for metadata-ratings where 1 row is for", "already calculated as part of the Discriminator Network. Parameters: tensor_output (torch.Tensor): M tensor", "the discriminator to be declared the winner Returns: A tuple a of Pandas", "rating opponent_mus (list(float)): The mu ratings of the opponents played opponent_phis (list(float)): The", "that once but with more data? 
for match in range(matches_per_pairing): real_inds = np.random.choice(np.arange(len(all_real)),", "/ (1/old_phi**2 + 1/v) ** 0.5 new_mu = old_mu + new_phi**2 * np.sum(g", "values for the player using Glicko2 calculation Parameters: old_mu (float): The former mu", "{'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform matches between each pair (R,D)", "= ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for id in ids: ratings[id]", "starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real =", "results match_results = {} for id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis':", "this threshold is considered a win for the discriminator # A score of", "norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and return the", "match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An accuracy greater than or equal to", "len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for id in", "y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def", "a win for the discriminator # A score of 1 is a win", "validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get", "= ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate", "ids[n_refiners:] ratings = {} for id in ids: ratings[id] = {'r': starting_rating, 'RD':", "between each pair (R,D) for id_R, R in 
zip(refiner_ids, refiners): for id_D, D", "(R,D) for id_R, R in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators):", "refiner_ids = ids[:n_refiners] discriminator_ids = ids[n_refiners:] ratings = {} for id in ids:", "opponent_mus (list(float)): The mu ratings of the opponents played opponent_phis (list(float)): The phi", "as np import pandas as pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device,", "''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc", "# Update scores for the refiners and discriminators new_ratings = ratings.copy() for id", "through refiner and discrim. like why not just do that once but with", "torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated =", "d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average accuracy of", "starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source?", "starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10, discriminator_win_thresh=0.6): ''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888", "The scores of the games played, 1 indicating a win, 0 indicating a", "the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) /", "* (scores - E)) new_rating = norm_val * new_mu + starting_rating new_rd =", "new glicko values for the player using Glicko2 calculation Parameters: old_mu (float): The", "g = 1.0 / (1 + 3 * opponent_phis**2 / np.pi**2) ** 0.5", "list of refiners and discriminators using the Tournament Skill Rating Evaluation. 
Parameters: refiners", "return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent accuracy of the", "''' import numpy as np import pandas as pd import torch def get_graph_ratings(refiners,", "torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi,", "results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'],", "Parameters: old_mu (float): The former mu rating old_phi (float): The former phi rating", "DataFrames... A Pandas DataFrame for metadata-ratings where 1 row is for 1 refiner", "of the match discriminator_win_thresh: The accuracy of the discriminator needed for the discriminator", "== tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating,", "determine the winner of the match discriminator_win_thresh: The accuracy of the discriminator needed", "{} for id in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0,", "return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source", "discrim. like why not just do that once but with more data? 
for", "'1' tensor_labels (torch.Tensor): M tensor true labels for each sample Returns: acc (float):", "[], 'opponent_phis': [], 'scores': []} # Perform matches between each pair (R,D) for", "A score of 1 is a win match_results[id_D]['scores'].append(1) match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) #", "'scores': []} # Perform matches between each pair (R,D) for id_R, R in", "device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for", "we need multiple matches? why not just change samples to samples_per_match*matches_per_pairing # ...like", "# Perform matches between each pair (R,D) for id_R, R in zip(refiner_ids, refiners):", "refiners + discriminators for simgan ''' import numpy as np import pandas as", "declared the winner Returns: A tuple a of Pandas DataFrames... A Pandas DataFrame", "rnd in range(n_rounds): # instantiate match results match_results = {} for id in", "d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined,", "opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ????", "and discrim. 
like why not just do that once but with more data?", "the discriminator (M samples,) probability of being class '1' tensor_labels (torch.Tensor): M tensor", "of being class '1' tensor_labels (torch.Tensor): M tensor true labels for each sample", "train_config (dict): dictionary holding information related to training starting_rating (float): The rating that", "DataFrame for metadata-ratings where 1 row is for 1 refiner (respectively for discriminator).", "ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match,", "of discriminators validation_data (simganData): SimGAN dataset train_config (dict): dictionary holding information related to", "initialized to starting_RD (float): The RD that players were initialized to norm_val (float):", "discriminator_win_thresh: The accuracy of the discriminator needed for the discriminator to be declared", "Probability of player winning each match v = np.sum(g**2 * E * (1", "glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD']", "The phi ratings of the opponents played scores (list(inte)): The scores of the", "that the sigmoid is already calculated as part of the Discriminator Network. Parameters:", "SimGAN dataset train_config (dict): dictionary holding information related to training starting_rating (float): The", "a Source? https://arxiv.org/abs/1808.04888 ????? Find the best refiner and discriminator from the list", "phi ratings of the opponents played scores (list(inte)): The scores of the games", "discriminator_win_thresh=0.6): ''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? Find the best refiner", "vs. 
the true labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach())", "starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] = glicko_calculations ratings = new_ratings # Get", "why not just change samples to samples_per_match*matches_per_pairing # ...like it's just running data", "import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350, norm_val=173.7178, n_rounds=3, matches_per_pairing=5, samples_per_match=10,", "from the list of refiners and discriminators using the Tournament Skill Rating Evaluation.", "tensor true labels for each sample Returns: acc (float): the probability accuracy of", "discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): ''' Calculate the percent", "labels ''' y_pred = torch.round(tensor_output)#.detatch()) acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return", "dataset train_config (dict): dictionary holding information related to training starting_rating (float): The rating", "results to match_results match_results[id_D]['opponent_mus'].append(ratings[id_R]['mu']) match_results[id_R]['opponent_mus'].append(ratings[id_D]['mu']) match_results[id_D]['opponent_phis'].append(ratings[id_R]['phi']) match_results[id_R]['opponent_phis'].append(ratings[id_D]['phi']) if avg_acc >= discriminator_win_thresh: # An", "tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val):", "** 0.5 # TODO: explain/figure out what g is E = 1.0 /", "the player using Glicko2 calculation Parameters: old_mu (float): The former mu rating old_phi", "new_rd) (float, float, float, float): The updated Glicko values for the player '''", "of the output, 
using the labels. Note that the sigmoid is already calculated", "pairing to determine the overall winner samples_per_match(int): The number of samples per match", "we get a Source? https://arxiv.org/abs/1808.04888 ????? Find the best refiner and discriminator from", "zip(discriminator_ids, discriminators): # RODD - ?...why do we need multiple matches? why not", "torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds):", "id in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val}", "opponent_phis (list(float)): The phi ratings of the opponents played scores (list(inte)): The scores", "- opponent_mus))) # Probability of player winning each match v = np.sum(g**2 *", "data through refiner and discrim. like why not just do that once but", "calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the average", "# Probability of player winning each match v = np.sum(g**2 * E *", "The normalization value used to convert between phi and RD Returns: (new_mu, new_phi,", "starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match,", "game outcomes delta = v * np.sum(g * (scores - E)) # Estimated", "= calc_acc(d_pred_real, labels_real) d_pred_refined = D(refined) acc_refined = calc_acc(d_pred_refined, labels_refined) # Find the", "= torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) # Get discriminator accuracy on real", "sample Returns: acc (float): the probability accuracy of the output vs. the true", "Rating Evaluation. 
Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list of discriminators", "samples,) probability of being class '1' tensor_labels (torch.Tensor): M tensor true labels for", "= calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'], new_ratings[id]['RD'] =", "id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [], 'scores': []} # Perform", "+ 1/v) ** 0.5 new_mu = old_mu + new_phi**2 * np.sum(g * (scores", "'opponent_phis': [], 'scores': []} # Perform matches between each pair (R,D) for id_R,", "ratings = new_ratings # Get refiner and discriminator with best ratings ratings_pd =", "greater than or equal to this threshold is considered a win for the", "opponent_mus))) # Probability of player winning each match v = np.sum(g**2 * E", "the Tournament Skill Rating Evaluation. 
Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)):", "probability of being class '1' tensor_labels (torch.Tensor): M tensor true labels for each", "import pandas as pd import torch def get_graph_ratings(refiners, discriminators, validation_data, device, starting_rating=1500, starting_rd=350,", "= validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): # instantiate match results", "to this threshold is considered a win for the discriminator # A score", "range(n_rounds): # instantiate match results match_results = {} for id in ids: match_results[id]", "in ids: ratings[id] = {'r': starting_rating, 'RD': starting_rd, 'mu': 0, 'phi': starting_rd/norm_val} labels_real", "[], 'scores': []} # Perform matches between each pair (R,D) for id_R, R", "0.5 # TODO: explain/figure out what g is E = 1.0 / (1", "labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float, device=device) all_real = validation_data.real_raw", "''' TODO...can we get a Source? https://arxiv.org/abs/1808.04888 ????? 
Find the best refiner and", "n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids =", "else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners and discriminators new_ratings =", "= norm_val * new_mu + starting_rating new_rd = norm_val * new_phi return new_mu,", "Get discriminator accuracy on real and refined data d_pred_real = D(real) acc_real =", "= match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'],", "dtype=torch.float, device=device) all_real = validation_data.real_raw all_simulated = validation_data.simulated_raw for rnd in range(n_rounds): #", "Tournament Skill Rating Evaluation. Parameters: refiners (list(torch.nn)): list of refiners discriminators (list(torch.nn)): list", "= np.random.choice(np.arange(len(all_simulated)), samples_per_match, replace=False) simulated = torch.tensor(all_simulated[sim_inds], dtype=torch.float, device=device) refined = R(simulated) #", "0.5 new_mu = old_mu + new_phi**2 * np.sum(g * (scores - E)) new_rating", "Discriminator Network. Parameters: tensor_output (torch.Tensor): M tensor output of the discriminator (M samples,)", "needed for the discriminator to be declared the winner Returns: A tuple a", "former mu rating old_phi (float): The former phi rating opponent_mus (list(float)): The mu", "acc = torch.sum(y_pred == tensor_labels.detach()) / len(tensor_labels.detach()) return acc def calculate_new_glicko_scores(old_mu, old_phi, opponent_mus,", "old_phi, opponent_mus, opponent_phis, scores, starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? 
https://en.wikipedia.org/wiki/Glicko_rating_system", "each match v = np.sum(g**2 * E * (1 - E)) ** -1", "of player winning each match v = np.sum(g**2 * E * (1 -", "of the player's rating based on game outcomes delta = v * np.sum(g", "starting_rating (float): The rating that players were initialized to norm_val (float): The normalization", "in range(n_rounds): # instantiate match results match_results = {} for id in ids:", "match_results[id_R]['scores'].append(0) else: match_results[id_D]['scores'].append(0) match_results[id_R]['scores'].append(1) # Update scores for the refiners and discriminators new_ratings", "accuracy on real and refined data d_pred_real = D(real) acc_real = calc_acc(d_pred_real, labels_real)", "match_results = {} for id in ids: match_results[id] = {'opponent_mus': [], 'opponent_phis': [],", "opponent_phis**2 / np.pi**2) ** 0.5 # TODO: explain/figure out what g is E", "for rnd in range(n_rounds): # instantiate match results match_results = {} for id", "ratings.copy() for id in ids: results = match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']),", "match_results[id] glicko_calculations = calculate_new_glicko_scores(ratings[id]['mu'], ratings[id]['phi'], np.array(results['opponent_mus']), np.array(results['opponent_phis']), np.array(results['scores']), starting_rating, norm_val) new_ratings[id]['mu'], new_ratings[id]['phi'], new_ratings[id]['r'],", "for id_R, R in zip(refiner_ids, refiners): for id_D, D in zip(discriminator_ids, discriminators): #", "phi and RD n_rounds(int): Number of rounds for the tournament matches_per_pairing(int): The number", "'mu': 0, 'phi': starting_rd/norm_val} labels_real = torch.zeros(samples_per_match, dtype=torch.float, device=device) labels_refined = torch.ones(samples_per_match, dtype=torch.float,", "v * np.sum(g * (scores - E)) # Estimated improvement in rating new_phi", "(float): The former phi rating opponent_mus 
(list(float)): The mu ratings of the opponents", "updated Glicko values for the player ''' g = 1.0 / (1 +", "import numpy as np import pandas as pd import torch def get_graph_ratings(refiners, discriminators,", "true labels for each sample Returns: acc (float): the probability accuracy of the", "discriminators for simgan ''' import numpy as np import pandas as pd import", "the opponents played opponent_phis (list(float)): The phi ratings of the opponents played scores", "np.sum(g**2 * E * (1 - E)) ** -1 # Estimated variance of", "- E)) # Estimated improvement in rating new_phi = 1 / (1/old_phi**2 +", "https://arxiv.org/abs/1808.04888 ????? Find the best refiner and discriminator from the list of refiners", "scores, starting_rating, norm_val): ''' TODO ...Source ???? http://www.glicko.net/glicko/glicko2.pdf ???? https://en.wikipedia.org/wiki/Glicko_rating_system ???? Calculate and", "= glicko_calculations ratings = new_ratings # Get refiner and discriminator with best ratings", "to be declared the winner Returns: A tuple a of Pandas DataFrames... A", "ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings", "E)) new_rating = norm_val * new_mu + starting_rating new_rd = norm_val * new_phi", "dtype=torch.float, device=device) refined = R(simulated) # Get discriminator accuracy on real and refined", "''' n_refiners = len(refiners) ids = np.arange(n_refiners + len(discriminators)) refiner_ids = ids[:n_refiners] discriminator_ids", "Get refiner and discriminator with best ratings ratings_pd = pd.DataFrame(ratings).T refiner_ratings = ratings_pd.loc[refiner_ids]", "Estimated improvement in rating new_phi = 1 / (1/old_phi**2 + 1/v) ** 0.5", "D in zip(discriminator_ids, discriminators): # RODD - ?...why do we need multiple matches?", "Returns: acc (float): the probability accuracy of the output vs. 
the true labels", "refiner_ratings = ratings_pd.loc[refiner_ids] discriminator_ratings = ratings_pd.loc[discriminator_ids] return refiner_ratings, discriminator_ratings def calc_acc(tensor_output, tensor_labels): '''", "discriminator_win_thresh: # An accuracy greater than or equal to this threshold is considered", "the player's rating based on game outcomes delta = v * np.sum(g *", "discriminator needed for the discriminator to be declared the winner Returns: A tuple", "E * (1 - E)) ** -1 # Estimated variance of the player's", "the labels. Note that the sigmoid is already calculated as part of the", "phi and RD Returns: (new_mu, new_phi, new_rating, new_rd) (float, float, float, float): The" ]
[ "j, k): index = i * self.vehicle_number * (self.customer_number + 1) + j", "k)] return x def get_fitness(self, vector): x = [[[0 for k in xrange(self.vehicle_number)]", "in xrange(self.customer_number + 1)] for i in xrange(self.customer_number + 1)] for i in", "or one car visited city twice return float(\"inf\") # check, if all vechicles", "ever that means :D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp", "def get_num_parameters(self): # compute number of parameters return len(self._discrete_values) def use_random_seed(self): # What", "return 2 def get_index(self, i, v): # index of 0 is 0 and", "or 1 return 2 def get_index(self, i, v): # index of 0 is", "e.g. 500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal", "return self._max_imp def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch adjustment rate", "vector[self.ijk_to_index(i, j, k)] # check, if cars were in the same town for", "1 and not visited: visited = True elif x[i][j][k] == 1 and visited:", "* (self.customer_number + 1) + j * self.vehicle_number + k return index def", "considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check,", "#pitch adjusting rate def ijk_to_index(self, i, j, k): index = i * self.vehicle_number", "== 1 and not visited: visited = True elif x[i][j][k] == 1 and", "self._mpai = 1 #TODO check, if par is used directly or via function", "i): # All variables are discrete return True def get_num_parameters(self): # compute number", "CPUs - 1 so that I have one available for use num_processes =", "class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number", "0 or 1 return 2 def get_index(self, i, v): # index of 0", 
"visited = True elif x[i][j][k] == 1 and visited: # two cars visited", "return len(self._discrete_values) def use_random_seed(self): # What ever that means :D return hasattr(self, '_random_seed')", "= float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check, if par is", "<problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size e.g. 10,", "x[0][j][k] == 1: car_starts_from_depot = True break if not car_starts_from_depot: return float(\"inf\") max_time", "i in xrange(self.customer_number + 1)] for i in range(self.customer_number + 1): for j", "Usage: hsa.py <problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size", "(self.customer_number + 1) + j * self.vehicle_number + k return index def index_to_ijk(index):", "max_time: max_time = time return max_time #TODO write vectorize solution #TODO unvectorize #TODO", "return index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0 for k", "twice return float(\"inf\") # check, if all vechicles started from depot for k", "pitch adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 
0.3 \"\"\"", "j in range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot = True break", "not car_starts_from_depot: return float(\"inf\") max_time = 0 for k in range(self.vehicle_number): time =", "from problemParser import parse_problem from docopt import docopt if __name__ == '__main__': arguments", "#define all input parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum number", "* self.vehicle_number self._discrete_values = [] self._variable = [] for i in range(number_of_variables): self._discrete_values.append([0,", "return x def get_fitness(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for", "from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left from multiprocessing", "def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when it runs return 0.5", "range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize = False #minimize self._max_imp", "means :D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self):", "j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there will be always 0 or", "1]) self._variable.append(True) #define all input parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni'])", "logical CPUs - 1 so that I have one available for use num_processes", "j # 0 otherwise number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values", "--hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size e.g. 
10, 20,", "def get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch", "self._hmcr def get_par(self): #TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par", "remove, when it runs return 0.5 def maximize(self): return self._maximize from problemParser import", "one car visited city twice return float(\"inf\") # check, if all vechicles started", "1)] for i in xrange(self.customer_number + 1)] for i in range(self.customer_number + 1):", "# two cars visited city or one car visited city twice return float(\"inf\")", "self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement", "e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 0.3 \"\"\" from problemParser import", "0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random", "via function self._par = 0.5 #pitch adjusting rate def ijk_to_index(self, i, j, k):", "1: car_starts_from_depot = True break if not car_starts_from_depot: return float(\"inf\") max_time = 0", "docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>'])", "discrete return True def get_num_parameters(self): # compute number of parameters return len(self._discrete_values) def", "500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g. 
0.9 --parxmin=<parmin> Minimal pitch", "traveled from i to j # 0 otherwise number_of_variables = (self.customer_number + 1)**2", "in range(self.customer_number + 1): for k in range(self.vehicle_number): if x[i][j][k] == 1 and", "return 5.0 def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there", "def get_mpap(self): #TODO remove, when it runs return 0.5 def maximize(self): return self._maximize", "0.8 --ni=<ni> Number of improvisations e.g. 500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment", "all input parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum number of", "e.g. 0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import", "return self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when it runs", "+ 1)] for i in range(self.customer_number + 1): for j in range(self.customer_number +", "cpu_count() - 1 #use number of logical CPUs - 1 so that I", "False #minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms']) #harmony", "range(self.vehicle_number): if x[i][j][k] == 1 and not visited: visited = True elif x[i][j][k]", "x[i][j][k] == 1 and not visited: visited = True elif x[i][j][k] == 1", "1: time += self.problem_instance['t'][i][j] if time > max_time: max_time = time return max_time", "ijk_to_index(self, i, j, k): index = i * self.vehicle_number * (self.customer_number + 1)", "visited city twice return float(\"inf\") # check, if all vechicles started from depot", "#minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms']) #harmony memory", "k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if cars were", "float(\"inf\") # check, if all vechicles started from depot for k in range(self.vehicle_number):", 
"from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__) problem_instance =", "depot for k in range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number +", "True def get_num_parameters(self): # compute number of parameters return len(self._discrete_values) def use_random_seed(self): #", "--hms=<hms> Harmony memory size e.g. 10, 20, 30... --hmcr=<hmcr> Harmony memory consideration rate", "parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left from", "that means :D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def", "range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector): x =", "memory considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO", "from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance", "when it runs return 0.5 def maximize(self): return self._maximize from problemParser import parse_problem", "is 1 in [0, 1] return v def is_variable(self, i): return self._variable[i] def", "30... --hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of", "float(\"inf\") max_time = 0 for k in range(self.vehicle_number): time = 0 for i", "if x[0][j][k] == 1: car_starts_from_depot = True break if not car_starts_from_depot: return float(\"inf\")", "return 0.5 def maximize(self): return self._maximize from problemParser import parse_problem from docopt import", "self.vehicle_number * (self.customer_number + 1) + j * self.vehicle_number + k return index", "#TODO remove, when it runs return 0.5 def maximize(self): return self._maximize from problemParser", "e.g. 10, 20, 30... 
--hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6, 0.7, 0.8", "index = i * self.vehicle_number * (self.customer_number + 1) + j * self.vehicle_number", "i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize = False", "have one available for use num_processes = 1 num_iterations = 100 (result, value)", "use num_processes = 1 num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes, num_iterations))", "+ 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check,", "j in range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j,", "range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] #", "for j in range(self.customer_number + 1): visited = False for i in range(self.customer_number", "get_mpap(self): #TODO remove, when it runs return 0.5 def maximize(self): return self._maximize from", "be always 0 or 1 return 2 def get_index(self, i, v): # index", "0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g. 
500, 1000, 2000 --parxmax=<parmax> Maximal", "v def is_variable(self, i): return self._variable[i] def is_discrete(self, i): # All variables are", "if cars were in the same town for j in range(self.customer_number + 1):", "= problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k traveled", "car_starts_from_depot = False for j in range(self.customer_number + 1): if x[0][j][k] == 1:", "is used directly or via function self._par = 0.5 #pitch adjusting rate def", "= VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use number of logical CPUs", "in range(self.vehicle_number): if x[i][j][k] == 1 and not visited: visited = True elif", "obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use number of logical", "docopt if __name__ == '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun =", "self._variable = [] for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input", "0.5 def maximize(self): return self._maximize from problemParser import parse_problem from docopt import docopt", "x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if cars were in the same", "k in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for i in xrange(self.customer_number", "= int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr", "1): if x[0][j][k] == 1: car_starts_from_depot = True break if not car_starts_from_depot: return", "1] return v def is_variable(self, i): return self._variable[i] def is_discrete(self, i): # All", "+ 1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time > max_time:", "--ni=<ni> Number of improvisations e.g. 
500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate", "self.vehicle_number self._discrete_values = [] self._variable = [] for i in range(number_of_variables): self._discrete_values.append([0, 1])", "memory consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g. 500,", "2 def get_index(self, i, v): # index of 0 is 0 and index", "import docopt if __name__ == '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun", "get_par(self): #TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self):", "max_time = time return max_time #TODO write vectorize solution #TODO unvectorize #TODO implement", "will be always 0 or 1 return 2 def get_index(self, i, v): #", "get_num_parameters(self): # compute number of parameters return len(self._discrete_values) def use_random_seed(self): # What ever", "1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time > max_time: max_time", "fitness return 5.0 def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): #", "+ 1): for j in range(self.customer_number + 1): if x[i][j][k] == 1: time", "always 0 or 1 return 2 def get_index(self, i, v): # index of", "'_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr def get_par(self):", "were in the same town for j in range(self.customer_number + 1): visited =", "--parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size e.g. 10, 20, 30... 
--hmcr=<hmcr> Harmony", "runs return 0.5 def maximize(self): return self._maximize from problemParser import parse_problem from docopt", "maximize(self): return self._maximize from problemParser import parse_problem from docopt import docopt if __name__", "arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k]", "= False for j in range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot", "j in xrange(self.customer_number + 1)] for i in xrange(self.customer_number + 1)] for i", "from i to j # 0 otherwise number_of_variables = (self.customer_number + 1)**2 \\", "self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check, if par", "the same town for j in range(self.customer_number + 1): visited = False for", "so that I have one available for use num_processes = 1 num_iterations =", "j in range(self.customer_number + 1): visited = False for i in range(self.customer_number +", "if par is used directly or via function self._par = 0.5 #pitch adjusting", "1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def", "rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check, if", "from depot for k in range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number", "5.0 def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there will", "hsa.py <problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size e.g.", "- 1 so that I have one available for use num_processes = 1", "#TODO unvectorize #TODO implement fitness return 5.0 def get_value(self, i, j=None): return random.randrange(2)", "self.problem_instance = 
problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1", "False for i in range(self.customer_number + 1): for k in range(self.vehicle_number): if x[i][j][k]", "1): for j in range(self.customer_number + 1): if x[i][j][k] == 1: time +=", "memory size e.g. 10, 20, 30... --hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6,", "range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number + 1): if x[0][j][k] ==", "self.problem_instance['t'][i][j] if time > max_time: max_time = time return max_time #TODO write vectorize", "one available for use num_processes = 1 num_iterations = 100 (result, value) =", "if time > max_time: max_time = time return max_time #TODO write vectorize solution", "range(self.customer_number + 1): for j in range(self.customer_number + 1): for k in range(self.vehicle_number):", "write vectorize solution #TODO unvectorize #TODO implement fitness return 5.0 def get_value(self, i,", "num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes, num_iterations)) print obj_fun.make_x_from_vector(result) print value", "k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector):", "What ever that means :D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return", "k in range(self.vehicle_number): time = 0 for i in range(self.customer_number + 1): for", "1 #TODO check, if par is used directly or via function self._par =", "#maximum number of improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr'])", "* self.vehicle_number * (self.customer_number + 1) + j * self.vehicle_number + k return", "make_x_from_vector(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number", "--parmax=<par> --parmin=<parmin> --ni=<ni> Options: 
--hms=<hms> Harmony memory size e.g. 10, 20, 30... --hmcr=<hmcr>", "1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment", "VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number =", "+ j * self.vehicle_number + k return index def index_to_ijk(index): pass def make_x_from_vector(self,", "import parse_problem from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__)", "max_time = 0 for k in range(self.vehicle_number): time = 0 for i in", "self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k", "for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for i in", "time += self.problem_instance['t'][i][j] if time > max_time: max_time = time return max_time #TODO", "# check, if all vechicles started from depot for k in range(self.vehicle_number): car_starts_from_depot", "unvectorize #TODO implement fitness return 5.0 def get_value(self, i, j=None): return random.randrange(2) def", "two cars visited city or one car visited city twice return float(\"inf\") #", "= 1 #TODO check, if par is used directly or via function self._par", "check, if par is used directly or via function self._par = 0.5 #pitch", "1 return 2 def get_index(self, i, v): # index of 0 is 0", "hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr def", "0 is 0 and index of 1 is 1 in [0, 1] return", "# What ever that means :D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self):", "1 num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes, num_iterations)) print obj_fun.make_x_from_vector(result) print", "in [0, 1] 
return v def is_variable(self, i): return self._variable[i] def is_discrete(self, i):", "implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms", "xrange(self.customer_number + 1)] for i in range(self.customer_number + 1): for j in range(self.customer_number", "def ijk_to_index(self, i, j, k): index = i * self.vehicle_number * (self.customer_number +", "used directly or via function self._par = 0.5 #pitch adjusting rate def ijk_to_index(self,", "city twice return float(\"inf\") # check, if all vechicles started from depot for", "of improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony memory", "= vector[self.ijk_to_index(i, j, k)] # check, if cars were in the same town", "import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance", "adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms def get_mpai(self):", "x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time > max_time: max_time = time", "in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if cars were in", "= 0.5 #pitch adjusting rate def ijk_to_index(self, i, j, k): index = i", "- 1 #use number of logical CPUs - 1 so that I have", "break if not car_starts_from_depot: return float(\"inf\") max_time = 0 for k in range(self.vehicle_number):", "return float(\"inf\") # check, if all vechicles started from depot for k in", "+ 1) + j * self.vehicle_number + k return index def index_to_ijk(index): pass", "+ 1): for j in range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k]", "and not visited: visited = True elif x[i][j][k] == 1 and visited: #", "2000 
--parxmax=<parmax> Maximal pitch adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate", "'__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes =", "return self._par def get_hms(self): return self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO", "len(self._discrete_values) def use_random_seed(self): # What ever that means :D return hasattr(self, '_random_seed') and", "20, 30... --hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number", "+ 1): if x[0][j][k] == 1: car_starts_from_depot = True break if not car_starts_from_depot:", "cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number']", "problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] =", "j * self.vehicle_number + k return index def index_to_ijk(index): pass def make_x_from_vector(self, vector):", "get_hms(self): return self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when it", "of improvisations e.g. 500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g. 
0.9", "self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check, if par is used directly", "i in range(self.customer_number + 1): for k in range(self.vehicle_number): if x[i][j][k] == 1", "cars visited city or one car visited city twice return float(\"inf\") # check,", "self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize = False #minimize self._max_imp =", "x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector): x = [[[0", "return v def is_variable(self, i): return self._variable[i] def is_discrete(self, i): # All variables", "rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g. 500, 1000, 2000", "#TODO write vectorize solution #TODO unvectorize #TODO implement fitness return 5.0 def get_value(self,", "return self._mpai def get_mpap(self): #TODO remove, when it runs return 0.5 def maximize(self):", "get_num_discrete_values(self, i): # there will be always 0 or 1 return 2 def", "range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if cars were in the", "problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k traveled from i to j", "k return index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0 for", "+ 1)] for i in xrange(self.customer_number + 1)] for i in range(self.customer_number +", "j in range(self.customer_number + 1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if", "variables are discrete return True def get_num_parameters(self): # compute number of parameters return", "if not car_starts_from_depot: return float(\"inf\") max_time = 0 for k in range(self.vehicle_number): time", "= cpu_count() - 1 #use number of logical CPUs - 1 so that", "for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize =", "= int(arguments['--hms']) #harmony memory size self._hmcr = 
float(arguments['--hmcr']) #harmony memory considering rate self._parmin", "it runs return 0.5 def maximize(self): return self._maximize from problemParser import parse_problem from", "[] for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize", "in range(self.vehicle_number): time = 0 for i in range(self.customer_number + 1): for j", "harmony_search import random from bisect import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface):", "return random.randrange(2) def get_num_discrete_values(self, i): # there will be always 0 or 1", "in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters self._maximize = False #minimize", "__init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] #", "k)] # check, if cars were in the same town for j in", "0 otherwise number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values = []", "size self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax =", "= time return max_time #TODO write vectorize solution #TODO unvectorize #TODO implement fitness", "or via function self._par = 0.5 #pitch adjusting rate def ijk_to_index(self, i, j,", "in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for i in xrange(self.customer_number +", "and index of 1 is 1 in [0, 1] return v def is_variable(self,", "elif x[i][j][k] == 1 and visited: # two cars visited city or one", "range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot = True break if not", "of logical CPUs - 1 so that I have one available for use", "visited = False for i in 
range(self.customer_number + 1): for k in range(self.vehicle_number):", "range(self.customer_number + 1): for j in range(self.customer_number + 1): if x[i][j][k] == 1:", "0 for i in range(self.customer_number + 1): for j in range(self.customer_number + 1):", "def index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0 for k in xrange(self.vehicle_number)]", "time > max_time: max_time = time return max_time #TODO write vectorize solution #TODO", "--hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of improvisations", "[] self._variable = [] for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all", "Minimal pitch adjustment rate e.g. 0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch", "j, k)] return x def get_fitness(self, vector): x = [[[0 for k in", "# compute number of parameters return len(self._discrete_values) def use_random_seed(self): # What ever that", "that I have one available for use num_processes = 1 num_iterations = 100", "get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch adjustment", "if __name__ == '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments,", "def get_num_discrete_values(self, i): # there will be always 0 or 1 return 2", "for i in range(self.customer_number + 1): for j in range(self.customer_number + 1): if", "= 0 for k in range(self.vehicle_number): time = 0 for i in range(self.customer_number", "if x[i][j][k] == 1 and not visited: visited = True elif x[i][j][k] ==", "# x[i][j][k] = 1 iff vehicle k traveled from i to j #", "for i in range(self.customer_number + 1): for k in range(self.vehicle_number): if x[i][j][k] ==", "pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def 
get_hms(self): return self._hms def", "range(self.vehicle_number): time = 0 for i in range(self.customer_number + 1): for j in", "#TODO implement fitness return 5.0 def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self,", "in range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number + 1): if x[0][j][k]", "1) + j * self.vehicle_number + k return index def index_to_ijk(index): pass def", "<reponame>tweinyan/hsavrptw #!/usr/bin/python \"\"\"hsa Usage: hsa.py <problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms>", "num_processes = 1 num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes, num_iterations)) print", "function self._par = 0.5 #pitch adjusting rate def ijk_to_index(self, i, j, k): index", "for use num_processes = 1 num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes,", "def make_x_from_vector(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j in", "is 0 and index of 1 is 1 in [0, 1] return v", "# index of 0 is 0 and index of 1 is 1 in", "of 0 is 0 and index of 1 is 1 in [0, 1]", "\"\"\" from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from", "city or one car visited city twice return float(\"inf\") # check, if all", "* self.vehicle_number + k return index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x", "1 #use number of logical CPUs - 1 so that I have one", "bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance =", "1 iff vehicle k traveled from i to j # 0 otherwise number_of_variables", "= vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector): x = [[[0 for", "and self._random_seed def get_max_imp(self): return self._max_imp 
def get_hmcr(self): return self._hmcr def get_par(self): #TODO", "return self._maximize from problemParser import parse_problem from docopt import docopt if __name__ ==", "# 0 otherwise number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values =", "max_time #TODO write vectorize solution #TODO unvectorize #TODO implement fitness return 5.0 def", "vehicle k traveled from i to j # 0 otherwise number_of_variables = (self.customer_number", "xrange(self.customer_number + 1)] for i in xrange(self.customer_number + 1)] for i in range(self.customer_number", "implement fitness return 5.0 def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i):", "def get_fitness(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j in", "self._max_imp def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch adjustment rate accroding", "multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number", "car_starts_from_depot: return float(\"inf\") max_time = 0 for k in range(self.vehicle_number): time = 0", "range(self.customer_number + 1): visited = False for i in range(self.customer_number + 1): for", "def use_random_seed(self): # What ever that means :D return hasattr(self, '_random_seed') and self._random_seed", "problem_instance) num_processes = cpu_count() - 1 #use number of logical CPUs - 1", "return self._hmcr def get_par(self): #TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return", "i to j # 0 otherwise number_of_variables = (self.customer_number + 1)**2 \\ *", "= i * self.vehicle_number * (self.customer_number + 1) + j * self.vehicle_number +", "1 so that I have one available for use num_processes = 1 num_iterations", "parameters return len(self._discrete_values) 
def use_random_seed(self): # What ever that means :D return hasattr(self,", "for k in range(self.vehicle_number): if x[i][j][k] == 1 and not visited: visited =", "adjustment rate e.g. 0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface,", "1 is 1 in [0, 1] return v def is_variable(self, i): return self._variable[i]", "1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if", "if all vechicles started from depot for k in range(self.vehicle_number): car_starts_from_depot = False", "Harmony memory size e.g. 10, 20, 30... --hmcr=<hmcr> Harmony memory consideration rate e.g.", "i): # there will be always 0 or 1 return 2 def get_index(self,", "i): return self._variable[i] def is_discrete(self, i): # All variables are discrete return True", "pass def make_x_from_vector(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j", "= True break if not car_starts_from_depot: return float(\"inf\") max_time = 0 for k", "(self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable = [] for", "parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms", "float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai =", "cars were in the same town for j in range(self.customer_number + 1): visited", "bisect import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance):", "Harmony memory consideration rate e.g. 
0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g.", "#TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return", "= [] for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define all input parameters", "import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left", "of parameters return len(self._discrete_values) def use_random_seed(self): # What ever that means :D return", "same town for j in range(self.customer_number + 1): visited = False for i", "1 and visited: # two cars visited city or one car visited city", "check, if cars were in the same town for j in range(self.customer_number +", "time = 0 for i in range(self.customer_number + 1): for j in range(self.customer_number", "self._par = 0.5 #pitch adjusting rate def ijk_to_index(self, i, j, k): index =", "= docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() -", "+= self.problem_instance['t'][i][j] if time > max_time: max_time = time return max_time #TODO write", "index of 1 is 1 in [0, 1] return v def is_variable(self, i):", "index of 0 is 0 and index of 1 is 1 in [0,", "get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there will be always", "[[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for i", "def get_par(self): #TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def", "= (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable = []", "+ 1): visited = False for i in range(self.customer_number + 1): for k", "get_index(self, i, v): # index of 0 is 0 and index 
of 1", "j, k)] # check, if cars were in the same town for j", "def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number']", "of 1 is 1 in [0, 1] return v def is_variable(self, i): return", "iff vehicle k traveled from i to j # 0 otherwise number_of_variables =", "Maximal pitch adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 0.3", "to j # 0 otherwise number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number", "range(self.customer_number + 1): for k in range(self.vehicle_number): if x[i][j][k] == 1 and not", "vector): x = [[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number +", "solution #TODO unvectorize #TODO implement fitness return 5.0 def get_value(self, i, j=None): return", "self._mpai def get_mpap(self): #TODO remove, when it runs return 0.5 def maximize(self): return", "+ 1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable = [] for i", "= [] self._variable = [] for i in range(number_of_variables): self._discrete_values.append([0, 1]) self._variable.append(True) #define", "self._variable[i] def is_discrete(self, i): # All variables are discrete return True def get_num_parameters(self):", "= float(arguments['--parmax']) self._mpai = 1 #TODO check, if par is used directly or", "\"\"\"hsa Usage: hsa.py <problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory", "self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax'])", "in range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot = True break if", "use_random_seed(self): # What ever that means :D return hasattr(self, '_random_seed') and self._random_seed def", "k): index = i * 
self.vehicle_number * (self.customer_number + 1) + j *", "1): for j in range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] =", "is_variable(self, i): return self._variable[i] def is_discrete(self, i): # All variables are discrete return", "self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when it runs return", "\\ * self.vehicle_number self._discrete_values = [] self._variable = [] for i in range(number_of_variables):", "in the same town for j in range(self.customer_number + 1): visited = False", "rate def ijk_to_index(self, i, j, k): index = i * self.vehicle_number * (self.customer_number", "--hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony memory size e.g. 10, 20, 30...", "rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 0.3 \"\"\" from problemParser", "number of logical CPUs - 1 so that I have one available for", "from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect", "self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k traveled from i", "x = [[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)]", "= 1 num_iterations = 100 (result, value) = (harmony_search(obj_fun, num_processes, num_iterations)) print obj_fun.make_x_from_vector(result)", "i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there will be always 0", "e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g. 
500, 1000, 2000 --parxmax=<parmax>", "= float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai", "if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time > max_time: max_time =", "= False #minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms'])", "self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms =", "+ 1): for k in range(self.vehicle_number): if x[i][j][k] == 1 and not visited:", "otherwise number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable", "i, j, k): index = i * self.vehicle_number * (self.customer_number + 1) +", "10, 20, 30... --hmcr=<hmcr> Harmony memory consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni>", "memory size self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax", ":D return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self): return", "from bisect import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments,", "Number of improvisations e.g. 
500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g.", "k traveled from i to j # 0 otherwise number_of_variables = (self.customer_number +", "= problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k traveled from i to", "in range(self.customer_number + 1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time", "[0, 1] return v def is_variable(self, i): return self._variable[i] def is_discrete(self, i): #", "ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left from multiprocessing import cpu_count class", "All variables are discrete return True def get_num_parameters(self): # compute number of parameters", "accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms def get_mpai(self): return self._mpai", "town for j in range(self.customer_number + 1): visited = False for i in", "i in range(self.customer_number + 1): for j in range(self.customer_number + 1): for k", "problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use", "random.randrange(2) def get_num_discrete_values(self, i): # there will be always 0 or 1 return", "#harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate self._parmin = float(arguments['--parmin'])", "for i in xrange(self.customer_number + 1)] for i in range(self.customer_number + 1): for", "adjustment rate e.g. 0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 
0.3 \"\"\" from", "available for use num_processes = 1 num_iterations = 100 (result, value) = (harmony_search(obj_fun,", "i in range(self.customer_number + 1): for j in range(self.customer_number + 1): if x[i][j][k]", "+ k return index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0", "xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for i in xrange(self.customer_number + 1)]", "True break if not car_starts_from_depot: return float(\"inf\") max_time = 0 for k in", "= True elif x[i][j][k] == 1 and visited: # two cars visited city", "0.7, 0.8 --ni=<ni> Number of improvisations e.g. 500, 1000, 2000 --parxmax=<parmax> Maximal pitch", "x[i][j][k] == 1 and visited: # two cars visited city or one car", "def is_discrete(self, i): # All variables are discrete return True def get_num_parameters(self): #", "started from depot for k in range(self.vehicle_number): car_starts_from_depot = False for j in", "size e.g. 10, 20, 30... --hmcr=<hmcr> Harmony memory consideration rate e.g. 
0.6, 0.7,", "index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for", "def get_hms(self): return self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when", "for k in range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number + 1):", "== 1: time += self.problem_instance['t'][i][j] if time > max_time: max_time = time return", "self._par def get_hms(self): return self._hms def get_mpai(self): return self._mpai def get_mpap(self): #TODO remove,", "int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr =", "self._variable.append(True) #define all input parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum", "are discrete return True def get_num_parameters(self): # compute number of parameters return len(self._discrete_values)", "get_fitness(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number", "index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x = [[[0 for k in", "return hasattr(self, '_random_seed') and self._random_seed def get_max_imp(self): return self._max_imp def get_hmcr(self): return self._hmcr", "in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector): x", "rate e.g. 0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search", "VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use number of logical CPUs -", "1 in [0, 1] return v def is_variable(self, i): return self._variable[i] def is_discrete(self,", "def get_index(self, i, v): # index of 0 is 0 and index of", "--ni=<ni> Options: --hms=<hms> Harmony memory size e.g. 10, 20, 30... 
--hmcr=<hmcr> Harmony memory", "problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle k traveled from", "import random from bisect import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def", "1): visited = False for i in range(self.customer_number + 1): for k in", "input parameters self._maximize = False #minimize self._max_imp = int(arguments['--ni']) #maximum number of improvisations", "in range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)]", "parse_problem from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__) problem_instance", "= parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use number", "in range(self.customer_number + 1): for j in range(self.customer_number + 1): for k in", "self.vehicle_number + k return index def index_to_ijk(index): pass def make_x_from_vector(self, vector): x =", "0.5 #pitch adjusting rate def ijk_to_index(self, i, j, k): index = i *", "in xrange(self.customer_number + 1)] for i in range(self.customer_number + 1): for j in", "== 1 and visited: # two cars visited city or one car visited", "to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms def get_mpai(self): return self._mpai def", "parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1 #use number of", "http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms def get_mpai(self): return self._mpai def get_mpap(self):", "adjusting rate def ijk_to_index(self, i, j, k): index = i * self.vehicle_number *", "i, v): # index of 0 is 0 and index of 1 is", "visited: # 
two cars visited city or one car visited city twice return", "not visited: visited = True elif x[i][j][k] == 1 and visited: # two", "int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate self._parmin =", "self._maximize from problemParser import parse_problem from docopt import docopt if __name__ == '__main__':", "= problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff", "improvisations e.g. 500, 1000, 2000 --parxmax=<parmax> Maximal pitch adjustment rate e.g. 0.9 --parxmin=<parmin>", "docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count() - 1", "i * self.vehicle_number * (self.customer_number + 1) + j * self.vehicle_number + k", "# check, if cars were in the same town for j in range(self.customer_number", "1): for k in range(self.vehicle_number): if x[i][j][k] == 1 and not visited: visited", "return True def get_num_parameters(self): # compute number of parameters return len(self._discrete_values) def use_random_seed(self):", "#!/usr/bin/python \"\"\"hsa Usage: hsa.py <problem_instance> --hms=<hms> --hmcr=<hmcr> --parmax=<par> --parmin=<parmin> --ni=<ni> Options: --hms=<hms> Harmony", "#harmony memory considering rate self._parmin = float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1", "0 for k in range(self.vehicle_number): time = 0 for i in range(self.customer_number +", "problemParser import parse_problem from pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect import", "import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self, arguments, problem_instance): self.problem_instance = problem_instance self.customer_number =", "#TODO check, if par is used directly or via function 
self._par = 0.5", "x[i][j][k] = 1 iff vehicle k traveled from i to j # 0", "= False for i in range(self.customer_number + 1): for k in range(self.vehicle_number): if", "car visited city twice return float(\"inf\") # check, if all vechicles started from", "#use number of logical CPUs - 1 so that I have one available", "pyharmonysearch import ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left from multiprocessing import", "visited: visited = True elif x[i][j][k] == 1 and visited: # two cars", "float(arguments['--parmin']) self._parmax = float(arguments['--parmax']) self._mpai = 1 #TODO check, if par is used", "get_mpai(self): return self._mpai def get_mpap(self): #TODO remove, when it runs return 0.5 def", "vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self, vector): x = [[[0 for k", "all vechicles started from depot for k in range(self.vehicle_number): car_starts_from_depot = False for", "range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return", "for k in range(self.vehicle_number): time = 0 for i in range(self.customer_number + 1):", "get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch adjustment rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf", "there will be always 0 or 1 return 2 def get_index(self, i, v):", "def get_value(self, i, j=None): return random.randrange(2) def get_num_discrete_values(self, i): # there will be", "return self._variable[i] def is_discrete(self, i): # All variables are discrete return True def", "def get_hmcr(self): return self._hmcr def get_par(self): #TODO implement pitch adjustment rate accroding to", "--parxmax=<parmax> Maximal pitch adjustment rate e.g. 
0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g.", "> max_time: max_time = time return max_time #TODO write vectorize solution #TODO unvectorize", "= [[[0 for k in xrange(self.vehicle_number)] for j in xrange(self.customer_number + 1)] for", "improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony memory considering", "self._hms = int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony memory considering rate", "self._max_imp = int(arguments['--ni']) #maximum number of improvisations self._hms = int(arguments['--hms']) #harmony memory size", "for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] # check, if cars", "__name__ == '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance)", "for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x def get_fitness(self,", "1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable = [] for i in", "+ 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i, j, k)] return x", "visited city or one car visited city twice return float(\"inf\") # check, if", "problem_instance self.customer_number = problem_instance['customer_number'] self.vehicle_number = problem_instance['vehicle_number'] # x[i][j][k] = 1 iff vehicle", "vectorize solution #TODO unvectorize #TODO implement fitness return 5.0 def get_value(self, i, j=None):", "car_starts_from_depot = True break if not car_starts_from_depot: return float(\"inf\") max_time = 0 for", "# there will be always 0 or 1 return 2 def get_index(self, i,", "== 1: car_starts_from_depot = True break if not car_starts_from_depot: return float(\"inf\") max_time =", "self._discrete_values = [] self._variable = [] for i in range(number_of_variables): 
self._discrete_values.append([0, 1]) self._variable.append(True)", "number_of_variables = (self.customer_number + 1)**2 \\ * self.vehicle_number self._discrete_values = [] self._variable =", "1)] for i in range(self.customer_number + 1): for j in range(self.customer_number + 1):", "for i in range(self.customer_number + 1): for j in range(self.customer_number + 1): for", "problemParser import parse_problem from docopt import docopt if __name__ == '__main__': arguments =", "== '__main__': arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes", "float(arguments['--parmax']) self._mpai = 1 #TODO check, if par is used directly or via", "k in range(self.vehicle_number): car_starts_from_depot = False for j in range(self.customer_number + 1): if", "= 0 for i in range(self.customer_number + 1): for j in range(self.customer_number +", "number of parameters return len(self._discrete_values) def use_random_seed(self): # What ever that means :D", "0.9 --parxmin=<parmin> Minimal pitch adjustment rate e.g. 0.3 \"\"\" from problemParser import parse_problem", "arguments = docopt(__doc__) problem_instance = parse_problem(arguments['<problem_instance>']) obj_fun = VRPTWObjectiveFunction(arguments, problem_instance) num_processes = cpu_count()", "for j in range(self.customer_number + 1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j]", "par is used directly or via function self._par = 0.5 #pitch adjusting rate", "is_discrete(self, i): # All variables are discrete return True def get_num_parameters(self): # compute", "consideration rate e.g. 0.6, 0.7, 0.8 --ni=<ni> Number of improvisations e.g. 
500, 1000,", "in range(self.customer_number + 1): for j in range(self.customer_number + 1): if x[i][j][k] ==", "check, if all vechicles started from depot for k in range(self.vehicle_number): car_starts_from_depot =", "k in range(self.vehicle_number): if x[i][j][k] == 1 and not visited: visited = True", "time return max_time #TODO write vectorize solution #TODO unvectorize #TODO implement fitness return", "vechicles started from depot for k in range(self.vehicle_number): car_starts_from_depot = False for j", "0 and index of 1 is 1 in [0, 1] return v def", "for j in range(self.customer_number + 1): for k in range(self.vehicle_number): x[i][j][k] = vector[self.ijk_to_index(i,", "Options: --hms=<hms> Harmony memory size e.g. 10, 20, 30... --hmcr=<hmcr> Harmony memory consideration", "num_processes = cpu_count() - 1 #use number of logical CPUs - 1 so", "return float(\"inf\") max_time = 0 for k in range(self.vehicle_number): time = 0 for", "directly or via function self._par = 0.5 #pitch adjusting rate def ijk_to_index(self, i,", "x def get_fitness(self, vector): x = [[[0 for k in xrange(self.vehicle_number)] for j", "True elif x[i][j][k] == 1 and visited: # two cars visited city or", "return max_time #TODO write vectorize solution #TODO unvectorize #TODO implement fitness return 5.0", "v): # index of 0 is 0 and index of 1 is 1", "random from bisect import bisect_left from multiprocessing import cpu_count class VRPTWObjectiveFunction(ObjectiveFunctionInterface): def __init__(self,", "pitch adjustment rate e.g. 
0.3 \"\"\" from problemParser import parse_problem from pyharmonysearch import", "def maximize(self): return self._maximize from problemParser import parse_problem from docopt import docopt if", "range(self.customer_number + 1): if x[i][j][k] == 1: time += self.problem_instance['t'][i][j] if time >", "I have one available for use num_processes = 1 num_iterations = 100 (result,", "for j in xrange(self.customer_number + 1)] for i in xrange(self.customer_number + 1)] for", "in range(self.customer_number + 1): visited = False for i in range(self.customer_number + 1):", "import ObjectiveFunctionInterface, harmony_search import random from bisect import bisect_left from multiprocessing import cpu_count", "and visited: # two cars visited city or one car visited city twice", "for j in range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot = True", "rate accroding to http://scialert.net/qredirect.php?doi=jas.2013.633.638&linkid=pdf return self._par def get_hms(self): return self._hms def get_mpai(self): return", "def is_variable(self, i): return self._variable[i] def is_discrete(self, i): # All variables are discrete", "number of improvisations self._hms = int(arguments['--hms']) #harmony memory size self._hmcr = float(arguments['--hmcr']) #harmony", "compute number of parameters return len(self._discrete_values) def use_random_seed(self): # What ever that means", "= 1 iff vehicle k traveled from i to j # 0 otherwise", "False for j in range(self.customer_number + 1): if x[0][j][k] == 1: car_starts_from_depot =", "# All variables are discrete return True def get_num_parameters(self): # compute number of", "--parxmin=<parmin> Minimal pitch adjustment rate e.g. 0.3 \"\"\" from problemParser import parse_problem from" ]
[ "main as ud_evaluate # noqa: F401 from .ud_train import main as ud_train #", "as ud_evaluate # noqa: F401 from .ud_train import main as ud_train # noqa:", "import main as ud_evaluate # noqa: F401 from .ud_train import main as ud_train", ".conll17_ud_eval import main as ud_evaluate # noqa: F401 from .ud_train import main as", "ud_evaluate # noqa: F401 from .ud_train import main as ud_train # noqa: F401", "<filename>bin/ud/__init__.py from .conll17_ud_eval import main as ud_evaluate # noqa: F401 from .ud_train import", "from .conll17_ud_eval import main as ud_evaluate # noqa: F401 from .ud_train import main" ]
[ "config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name", "== 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from", "== 'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name", "Common common = Common() assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def", "Common() assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config", "common = Common() assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass():", "assert config.pipeline == 'my-pipeline' assert config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch',", "config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch", "test_config_common_dataclass(): from infrastructure.config import Common common = Common() assert common.organization_name == 'igvf-dacc' assert", "'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', ) assert", "assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert", "assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config import", "import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc'", "pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert", "config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name = get_config_name_from_branch('dev') assert config_name ==", "import Common common = Common() assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd'", "config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name", "assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import", "assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def", "config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER", "== 'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides. pipeline='my-pipeline', ) config =", "assert config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline', )", "get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name = get_config_name_from_branch('dev') assert config_name", "name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert", "config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name ==", "assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def", "common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config = Config( name='demo',", "== 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config = Config( name='demo', branch='xyz-branch',", "infrastructure.config import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name ==", "branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier", "config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert config.pipeline", ") assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER", "from infrastructure.config import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name", "assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert", 
"infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name = get_config_name_from_branch('dev')", "in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common = Common() assert common.organization_name", "from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name =", "from infrastructure.config import Common common = Common() assert common.organization_name == 'igvf-dacc' assert common.project_name", "== 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert config.pipeline ==", "'my-branch' assert config.pipeline == 'my-pipeline' assert config.name == 'demo' config = build_config_from_name( 'demo',", "def test_config_exists(): from infrastructure.config import config assert 'demo' in config['environment'] def test_config_common_dataclass(): from", "import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', )", "'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config =", "= build_config_from_name( 'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch',", "config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline", "is None assert config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from", "DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert", "assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert", "config.pipeline == 'my-pipeline' assert config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch', #", "infrastructure.config import config assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common", "test_config_exists(): from infrastructure.config import config assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config", "test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo',", "def test_config_config_dataclass(): from infrastructure.config import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', )", "'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert", "build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert", "'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert config.pipeline == 
'xyz-pipeline'", "== 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert config.pipeline ==", "config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name():", "from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch',", "def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo'", "assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert", "assert config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import", "== 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config", "= Common() assert common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from", "== DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name ==", "config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name", "import config assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common", "config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch", "import get_config_name_from_branch 
config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name = get_config_name_from_branch('dev') assert", "assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import", "infrastructure.config import Common common = Common() assert common.organization_name == 'igvf-dacc' assert common.project_name ==", "assert config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature')", "build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert", "Overrides. pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc'", "test_config_config_dataclass(): from infrastructure.config import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert", "pytest def test_config_exists(): from infrastructure.config import config assert 'demo' in config['environment'] def test_config_common_dataclass():", "import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc'", "DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name == 'demo'", ") config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name", "build_config_from_name( 'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', )", "None assert config.branch == 'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config", "config = build_config_from_name( 'demo', branch='my-branch', # Overrides. pipeline='my-pipeline', ) config = build_config_from_name( 'dev',", "Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert", "'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch'", "= Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name ==", "Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd'", "assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch' assert", "def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name(", "config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline', ) config", "'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config = Config( name='demo', branch='xyz-branch', pipeline='xyz-pipeline',", "def test_config_common_dataclass(): from infrastructure.config import Common common = Common() assert common.organization_name == 'igvf-dacc'", "'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline'", "= build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name ==", "pipeline='xyz-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is", "branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is", "from infrastructure.config import config assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import", "= build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd'", "config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch", "assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common = Common()", "'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch'", "config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch():", "config = build_config_from_name( 'demo', 
branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name", "= get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name = get_config_name_from_branch('dev') assert config_name == 'dev'", "'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config", "'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'xyz-branch'", "'xyz-branch' assert config.pipeline == 'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants", "branch='my-branch', # Overrides. pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name", "'xyz-pipeline' def test_config_build_config_from_name(): from infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config =", "import pytest def test_config_exists(): from infrastructure.config import config assert 'demo' in config['environment'] def", "== 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch ==", "config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name", ") assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None", "== 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name == 'demo' config = build_config_from_name(", "== 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from 
infrastructure.config import get_config_name_from_branch config_name", "infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name ==", "config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name == 'demo' config =", "'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name ==", "from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name", "is None assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name ==", "branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier", "assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert", "assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config = Config(", "== 'my-pipeline' assert config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides.", "assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert", "'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name =", "pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert 
config.snapshot_source_db_identifier ==", "'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack'", "== 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev' def test_config_build_config_from_branch(): from", "infrastructure.config import build_config_from_name from infrastructure.constants import DEV_DATABASE_IDENTIFIER config = build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline',", "config.name == 'dev' def test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert", "# Overrides. pipeline='my-pipeline', ) config = build_config_from_name( 'dev', branch='my-branch', ) assert config.common.organization_name ==", "assert config.branch == 'my-branch' assert config.pipeline == 'my-pipeline' assert config.name == 'demo' config", "config assert 'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common =", "build_config_from_name( 'demo', branch='my-branch', pipeline='my-pipeline', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd'", "'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides. pipeline='my-pipeline', ) config = build_config_from_name(", "test_config_build_config_from_branch(): from infrastructure.config import get_config_name_from_branch config_name = get_config_name_from_branch('IGVF-123-add-new-feature') assert config_name == 'demo' config_name", "'my-pipeline' assert config.name == 'demo' config = build_config_from_name( 'demo', branch='my-branch', # Overrides. 
pipeline='my-pipeline',", "config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier is None assert config.branch == 'my-branch' assert config.pipeline", "== 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config config", "'demo' in config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common = Common() assert", "'dev', branch='my-branch', ) assert config.common.organization_name == 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier", "== 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch == 'my-branch' assert config.pipeline ==", "config['environment'] def test_config_common_dataclass(): from infrastructure.config import Common common = Common() assert common.organization_name ==", "common.organization_name == 'igvf-dacc' assert common.project_name == 'igvfd' def test_config_config_dataclass(): from infrastructure.config import Config", "== 'igvf-dacc' assert config.common.project_name == 'igvfd' assert config.snapshot_source_db_identifier == DEV_DATABASE_IDENTIFIER assert config.branch ==", "None assert config.branch == 'my-branch' assert config.pipeline == 'ContinuousDeploymentPipelineStack' assert config.name == 'dev'" ]
[ "from .cube_animations import * try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata", ".cube_animations import * try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata __version__", "from .cube import * from .cube_animations import * try: import importlib.metadata as importlib_metadata", "* try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata __version__ = importlib_metadata.version(__name__)", ".cube import * from .cube_animations import * try: import importlib.metadata as importlib_metadata except", "import * from .cube_animations import * try: import importlib.metadata as importlib_metadata except ModuleNotFoundError:", "import * try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata __version__ =", "* from .cube_animations import * try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import" ]
[ "[ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer',", "2016-02-24 13:22 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration):", "= [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField(", "on 2016-02-24 13:22 from __future__ import unicode_literals from django.db import migrations, models class", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations =", "models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField(", "model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='timer', name='no_bell', field=models.BooleanField(default=False), ),", "# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-02-24 13:22", "coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-02-24 13:22 from __future__", "operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField(", "migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='timer', name='no_bell', field=models.BooleanField(default=False),", "utf-8 -*- # Generated by Django 1.9.2 on 2016-02-24 13:22 from __future__ import", "Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ 
migrations.RemoveField( model_name='timer', name='no_refresh',", "migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [", "dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ),", "= [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='timer',", "] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ),", "'0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False),", "Django 1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals from django.db import migrations,", "1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals from django.db import migrations, models", "('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed',", "import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers',", "by Django 1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals from django.db import", "unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'),", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations", 
"name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='timer', name='no_bell', field=models.BooleanField(default=False), ), ]", "<gh_stars>1-10 # -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-02-24", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ]", "# Generated by Django 1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals from", "13:22 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies", "[ migrations.RemoveField( model_name='timer', name='no_refresh', ), migrations.AddField( model_name='timer', name='alarm_until_dismissed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='timer', name='no_bell',", "from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "-*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-02-24 13:22 from", "__future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "Generated by Django 1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals from django.db", "class Migration(migrations.Migration): dependencies = [ ('info_timers', '0005_auto_20160213_1439'), ] operations = [ migrations.RemoveField( model_name='timer',", "-*- # Generated by Django 1.9.2 on 2016-02-24 13:22 from __future__ import unicode_literals" ]
[ "*not* handle phase ambiguity resolution. # # QPSK Example # In this noiseless", "plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat =", "<reponame>ae6nr/digicomm<gh_stars>1-10 # # Symbol Synchronization # # This script tests the efficacy of", "Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2", "# use the first few symbols as a poor man's unique word. This", "plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat", "for QPSK and 16-APSK constellations. # This script does *not* handle phase ambiguity", "a QPSK constellation. # Then we attempt to estimate the frequency offset and", "# QPSK Example # In this noiseless example, we create a series of", "rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2,", "is cheating because we don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag,", "as plt from digicomm import * # import my helper functions plt.ion() #", "poor man's unique word. This is cheating because we don't have explicit knowledge", "markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') #", "and 16-APSK constellations. # This script does *not* handle phase ambiguity resolution. #", "Symbol Synchronization # # This script tests the efficacy of frequency offset estimators", "handle phase ambiguity resolution. # # QPSK Example # In this noiseless example,", "create a series of symbols using a QPSK constellation. 
# Then we attempt", "= addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show()", "plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) #", "constellations. # This script does *not* handle phase ambiguity resolution. # # QPSK", "and derotate accordingly. # If this works correctly, the derotated points should look", "script tests the efficacy of frequency offset estimators for QPSK and 16-APSK constellations.", "digicomm import * # import my helper functions plt.ion() # turn on interactive", "nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx", "Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive mode plt.show() # keep", "frequency offset estimators for QPSK and 16-APSK constellations. # This script does *not*", "this works correctly, the derotated points should look like the original constellation, but", "estimate the frequency offset and derotate accordingly. # If this works correctly, the", "constellation, but perhaps with a constant phase offset. # # Author: redd #", "np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure()", "of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show()", "the frequency offset and derotate accordingly. 
# If this works correctly, the derotated", "If this works correctly, the derotated points should look like the original constellation,", "on interactive mode c = getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits", "SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True)", "estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1)", "plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first", "markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive mode", "Author: redd # import numpy as np import scipy as sp from matplotlib", "frequency offset and derotate accordingly. # If this works correctly, the derotated points", "luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few", "np import scipy as sp from matplotlib import pyplot as plt from digicomm", "to estimate the frequency offset and derotate accordingly. 
# If this works correctly,", "# estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.',", "numpy as np import scipy as sp from matplotlib import pyplot as plt", "should look like the original constellation, but perhaps with a constant phase offset.", "this noiseless example, we create a series of symbols using a QPSK constellation.", "bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.',", "plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat)", "rx_2[0:luw], tx[0:luw]) # use the first few symbols as a poor man's unique", "len(c) nsyms = 2**13 nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms", "nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation", "knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True)", "as a poor man's unique word. This is cheating because we don't have", "plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3", "tests the efficacy of frequency offset estimators for QPSK and 16-APSK constellations. #", "M = len(c) nsyms = 2**13 nbits = nsyms * int(np.log2(M)) bits =", "rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few symbols as a", "of symbols using a QPSK constellation. # Then we attempt to estimate the", "functions plt.ion() # turn on interactive mode c = getConstellation(type='qpsk') M = len(c)", "look like the original constellation, but perhaps with a constant phase offset. 
#", "= freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure()", "plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive mode plt.show()", "plt.ion() # turn on interactive mode c = getConstellation(type='qpsk') M = len(c) nsyms", "= getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits = nsyms * int(np.log2(M))", "don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase", "matplotlib import pyplot as plt from digicomm import * # import my helper", "offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated')", "addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset')", "mode c = getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits = nsyms", "man's unique word. This is cheating because we don't have explicit knowledge of", "plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use", "= phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few symbols as a poor", "plt.grid(True) plt.show() plt.ioff() # turn off interactive mode plt.show() # keep plots visible", "= nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms]", "import scipy as sp from matplotlib import pyplot as plt from digicomm import", "offset and derotate accordingly. # If this works correctly, the derotated points should", "script does *not* handle phase ambiguity resolution. 
# # QPSK Example # In", "derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64", "'.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw],", "import * # import my helper functions plt.ion() # turn on interactive mode", "= 2**13 nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M)", "c = getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits = nsyms *", "plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 =", "phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few symbols as a poor man's", "# Author: redd # import numpy as np import scipy as sp from", "accordingly. # If this works correctly, the derotated points should look like the", "nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx =", "Synchronization # # This script tests the efficacy of frequency offset estimators for", "plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency", "of frequency offset estimators for QPSK and 16-APSK constellations. # This script does", "16-APSK constellations. # This script does *not* handle phase ambiguity resolution. 
# #", "rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with", "works correctly, the derotated points should look like the original constellation, but perhaps", "= bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag,", "freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real,", "This script tests the efficacy of frequency offset estimators for QPSK and 16-APSK", "syms = bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real,", "Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive mode plt.show() # keep plots", "few symbols as a poor man's unique word. This is cheating because we", "int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)),", "the first few symbols as a poor man's unique word. This is cheating", "first few symbols as a poor man's unique word. This is cheating because", "from digicomm import * # import my helper functions plt.ion() # turn on", "turn on interactive mode c = getConstellation(type='qpsk') M = len(c) nsyms = 2**13", "offset. # # Author: redd # import numpy as np import scipy as", "as np import scipy as sp from matplotlib import pyplot as plt from", "we create a series of symbols using a QPSK constellation. # Then we", "noiseless example, we create a series of symbols using a QPSK constellation. #", "attempt to estimate the frequency offset and derotate accordingly. # If this works", "QPSK and 16-APSK constellations. # This script does *not* handle phase ambiguity resolution.", "phase offset. 
# # Author: redd # import numpy as np import scipy", "tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff()", "rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx,", "= c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal')", "points should look like the original constellation, but perhaps with a constant phase", "# # This script tests the efficacy of frequency offset estimators for QPSK", "* int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx =", "tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1)", "# This script tests the efficacy of frequency offset estimators for QPSK and", "= addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency", "because we don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1)", "plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn", "unique word. This is cheating because we don't have explicit knowledge of tx.", "constellation. # Then we attempt to estimate the frequency offset and derotate accordingly.", "* # import my helper functions plt.ion() # turn on interactive mode c", "symbols as a poor man's unique word. This is cheating because we don't", "efficacy of frequency offset estimators for QPSK and 16-APSK constellations. 
# This script", "# derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw =", "This is cheating because we don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real,", "we attempt to estimate the frequency offset and derotate accordingly. # If this", "plt from digicomm import * # import my helper functions plt.ion() # turn", "my helper functions plt.ion() # turn on interactive mode c = getConstellation(type='qpsk') M", "a series of symbols using a QPSK constellation. # Then we attempt to", "Then we attempt to estimate the frequency offset and derotate accordingly. # If", "as sp from matplotlib import pyplot as plt from digicomm import * #", "# In this noiseless example, we create a series of symbols using a", "the derotated points should look like the original constellation, but perhaps with a", "does *not* handle phase ambiguity resolution. # # QPSK Example # In this", "# # Symbol Synchronization # # This script tests the efficacy of frequency", "getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits = nsyms * int(np.log2(M)) bits", "frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal')", "bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10,", "rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True)", "# turn on interactive mode c = getConstellation(type='qpsk') M = len(c) nsyms =", "use the first few symbols as a poor man's unique word. 
This is", "import numpy as np import scipy as sp from matplotlib import pyplot as", "# import my helper functions plt.ion() # turn on interactive mode c =", "like the original constellation, but perhaps with a constant phase offset. # #", "= 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few symbols", "interactive mode c = getConstellation(type='qpsk') M = len(c) nsyms = 2**13 nbits =", "QPSK constellation. # Then we attempt to estimate the frequency offset and derotate", "2**13 nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx", "Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 =", "= np.random.randint(0,2,size=(nbits,)) syms = bitsToSymbols(bits,M) tx = c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2)", "resolution. # # QPSK Example # In this noiseless example, we create a", "helper functions plt.ion() # turn on interactive mode c = getConstellation(type='qpsk') M =", "markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw])", "64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the first few symbols as", "# This script does *not* handle phase ambiguity resolution. # # QPSK Example", "word. This is cheating because we don't have explicit knowledge of tx. plt.figure()", "have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity", "offset estimators for QPSK and 16-APSK constellations. 
# This script does *not* handle", "plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate", "mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag,", "import my helper functions plt.ion() # turn on interactive mode c = getConstellation(type='qpsk')", "nsyms = 2**13 nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,)) syms =", "c[syms] rx = addNoise(addPhaseOffset(addFrequencyOffset(tx,nuT=0.01)), SNR=10, Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols", "example, we create a series of symbols using a QPSK constellation. # Then", "from matplotlib import pyplot as plt from digicomm import * # import my", "derotate accordingly. # If this works correctly, the derotated points should look like", "derotated points should look like the original constellation, but perhaps with a constant", "# # Author: redd # import numpy as np import scipy as sp", "# Then we attempt to estimate the frequency offset and derotate accordingly. #", "series of symbols using a QPSK constellation. # Then we attempt to estimate", "Eb=1/2) plt.figure() plt.plot(rx.real, rx.imag, '.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show()", "but perhaps with a constant phase offset. # # Author: redd # import", "a poor man's unique word. 
This is cheating because we don't have explicit", "rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off", "pyplot as plt from digicomm import * # import my helper functions plt.ion()", "In this noiseless example, we create a series of symbols using a QPSK", "plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset rx_2 = addFrequencyOffset(rx,nuT=-nuT_hat) #", "using a QPSK constellation. # Then we attempt to estimate the frequency offset", "'.', markersize=1) plt.axis('equal') plt.title('Symbols with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1')", "# # QPSK Example # In this noiseless example, we create a series", "scipy as sp from matplotlib import pyplot as plt from digicomm import *", "Example # In this noiseless example, we create a series of symbols using", "correctly, the derotated points should look like the original constellation, but perhaps with", "plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() #", "with a constant phase offset. # # Author: redd # import numpy as", "explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution')", "tx[0:luw]) # use the first few symbols as a poor man's unique word.", "import pyplot as plt from digicomm import * # import my helper functions", "estimators for QPSK and 16-APSK constellations. # This script does *not* handle phase", "plt.grid(True) plt.show() luw = 64 rx_3 = phaseAmbiguityResolution(rx_2, rx_2[0:luw], tx[0:luw]) # use the", "the original constellation, but perhaps with a constant phase offset. 
# # Author:", "plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive mode plt.show() #", "constant phase offset. # # Author: redd # import numpy as np import", "= len(c) nsyms = 2**13 nbits = nsyms * int(np.log2(M)) bits = np.random.randint(0,2,size=(nbits,))", "QPSK Example # In this noiseless example, we create a series of symbols", "'.', markersize=1) plt.axis('square') plt.title('Phase Ambiguity Resolution') plt.grid(True) plt.show() plt.ioff() # turn off interactive", "a constant phase offset. # # Author: redd # import numpy as np", "original constellation, but perhaps with a constant phase offset. # # Author: redd", "we don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.', markersize=1) plt.axis('square')", "# If this works correctly, the derotated points should look like the original", "the efficacy of frequency offset estimators for QPSK and 16-APSK constellations. # This", "This script does *not* handle phase ambiguity resolution. # # QPSK Example #", "perhaps with a constant phase offset. # # Author: redd # import numpy", "symbols using a QPSK constellation. # Then we attempt to estimate the frequency", "addFrequencyOffset(rx,nuT=-nuT_hat) # derotation plt.figure() plt.plot(rx_2.real, rx_2.imag, '.', markersize=1) plt.axis('equal') plt.title('Derotated') plt.grid(True) plt.show() luw", "# Symbol Synchronization # # This script tests the efficacy of frequency offset", "phase ambiguity resolution. # # QPSK Example # In this noiseless example, we", "cheating because we don't have explicit knowledge of tx. plt.figure() plt.plot(rx_3.real, rx_3.imag, '.',", "# import numpy as np import scipy as sp from matplotlib import pyplot", "ambiguity resolution. 
# # QPSK Example # In this noiseless example, we create", "redd # import numpy as np import scipy as sp from matplotlib import", "sp from matplotlib import pyplot as plt from digicomm import * # import", "with Frequency Offset') plt.grid(True) plt.show() nuT_hat = freqOffsetEstimationQpsk(rx, mode='interp_1') # estimate frequency offset" ]
[ "path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = []", "# -*- coding: utf-8 -*- from __future__ import unicode_literals def directory_splitter(path,include_filename = False):", "= '' for partial_path in path_hierarchy: paths.append(current_path) if current_path != '': current_path+='/' current_path+=partial_path", "import unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy", "partial_path in path_hierarchy: paths.append(current_path) if current_path != '': current_path+='/' current_path+=partial_path paths.append(current_path) return paths", "current_path = '' for partial_path in path_hierarchy: paths.append(current_path) if current_path != '': current_path+='/'", "if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy =", "coding: utf-8 -*- from __future__ import unicode_literals def directory_splitter(path,include_filename = False): if include_filename:", "path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths", "= [] current_path = '' for partial_path in path_hierarchy: paths.append(current_path) if current_path !=", "[] current_path = '' for partial_path in path_hierarchy: paths.append(current_path) if current_path != '':", "-*- from __future__ import unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy =", "def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1]", "directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if", "include_filename: 
path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:]", "= False): if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'):", "path_hierarchy = path_hierarchy[1:] paths = [] current_path = '' for partial_path in path_hierarchy:", "= path.split(\"/\") else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths =", "path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path = '' for", "path_hierarchy[1:] paths = [] current_path = '' for partial_path in path_hierarchy: paths.append(current_path) if", "utf-8 -*- from __future__ import unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy", "<filename>checkmate/lib/stats/helpers.py # -*- coding: utf-8 -*- from __future__ import unicode_literals def directory_splitter(path,include_filename =", "from __future__ import unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\")", "__future__ import unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\") else:", "unicode_literals def directory_splitter(path,include_filename = False): if include_filename: path_hierarchy = path.split(\"/\") else: path_hierarchy =", "= path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path = ''", "path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path = '' for partial_path in", "'' for partial_path in path_hierarchy: paths.append(current_path) if current_path != '': current_path+='/' current_path+=partial_path paths.append(current_path)", "False): if include_filename: path_hierarchy = path.split(\"/\") else: 
path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy", "for partial_path in path_hierarchy: paths.append(current_path) if current_path != '': current_path+='/' current_path+=partial_path paths.append(current_path) return", "= path_hierarchy[1:] paths = [] current_path = '' for partial_path in path_hierarchy: paths.append(current_path)", "path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path =", "else: path_hierarchy = path.split(\"/\")[:-1] if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path", "-*- coding: utf-8 -*- from __future__ import unicode_literals def directory_splitter(path,include_filename = False): if", "paths = [] current_path = '' for partial_path in path_hierarchy: paths.append(current_path) if current_path", "if path.startswith('/'): path_hierarchy = path_hierarchy[1:] paths = [] current_path = '' for partial_path" ]
[ "True self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket", "self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start()", "break self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port,", "remote_host chain_port = remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect = self._onDisconnect", "# server_thread.daemon = True # server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port,", "self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to %s:%d failed: %s\" %", "SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! 
Tunnel open %r -> %r ->", "chain_host = remote_host # chain_port = remote_port # ssh_transport = transport # #", "True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile,", "to %s:%d was rejected by the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected!", "import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),)", "# server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class TunServer: def", "return if chan is None: logging.debug(\"Incoming request to %s:%d was rejected by the", "transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect = None", "and is released under the \"MIT License Agreement\". 
Please see the LICENSE #", "= self.request.recv(1024) if len(data) == 0: break chan.send(data) if chan in r: data", "остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True)", "self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target =", "# ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\",", "logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass def stop(self): self.server.shutdown()", "SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport = transport onConnect = self._onConnect", "(peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" %", "file that should have been included as part of this package. import select", "import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host,", "if self.request in r: data = self.request.recv(1024) if len(data) == 0: break chan.send(data)", "for Windows, # and is released under the \"MIT License Agreement\". 
Please see", "Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as", "Exception as e: logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e)))", "chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x = select.select([self.request, chan],", "self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True", "(self.chain_host, self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming request to %s:%d was", "server = ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True", "open %r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try:", "def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def", "# All rights reserved. # This file is part of the Intsa Term", "# class SubHander(Handler): # chain_host = remote_host # chain_port = remote_port # ssh_transport", "try: while True: r, w, x = select.select([self.request, chan], [], []) if self.request", "<<EMAIL>>. # All rights reserved. # This file is part of the Intsa", "this is a little convoluted, but lets me configure things for the Handler", "if len(data) == 0: break chan.send(data) if chan in r: data = chan.recv(1024)", "the Handler # # object. 
(socketserver doesn't give Handlers any way to access", "#self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start')", "= True # server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class", "if chan is None: logging.debug(\"Incoming request to %s:%d was rejected by the SSH", "outer # # server normally.) # class SubHander(Handler): # chain_host = remote_host #", "the Intsa Term Client - X2Go terminal client for Windows, # and is", "\"MIT License Agreement\". Please see the LICENSE # file that should have been", "reserved. # This file is part of the Intsa Term Client - X2Go", "(self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to %s:%d failed: %s\"", "under the \"MIT License Agreement\". Please see the LICENSE # file that should", "remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport =", "self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\"", "import select import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try:", "transport): class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport = transport onConnect", "peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass def stop(self):", "peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # #", "= remote_host chain_port = remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect 
=", "self.onConnect(self.request.getpeername()) try: while True: r, w, x = select.select([self.request, chan], [], []) if", "chain_port = remote_port # ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() #", "= True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,))", "is released under the \"MIT License Agreement\". Please see the LICENSE # file", "configure things for the Handler # # object. (socketserver doesn't give Handlers any", "remote_host # chain_port = remote_port # ssh_transport = transport # # #ForwardServer((\"\", local_port),", "Agreement\". Please see the LICENSE # file that should have been included as", "local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) #", "data = self.request.recv(1024) if len(data) == 0: break chan.send(data) if chan in r:", "access the outer # # server normally.) 
# class SubHander(Handler): # chain_host =", "remote_host, remote_port, transport): # # this is a little convoluted, but lets me", "% (self.chain_host, self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming request to %s:%d", "socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки от веб-сервера", "def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port =", "Tunnel open %r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername())", "# chain_port = remote_port # ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever()", "SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class TunServer:", "client for Windows, # and is released under the \"MIT License Agreement\". Please", "# file that should have been included as part of this package. import", "things for the Handler # # object. (socketserver doesn't give Handlers any way", "self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect:", "w, x = select.select([self.request, chan], [], []) if self.request in r: data =", "class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport = transport onConnect =", "of this package. import select import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler):", "__init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port = remote_port", "the outer # # server normally.) 
# class SubHander(Handler): # chain_host = remote_host", "onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect = None self.server", "except Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port,", "is a little convoluted, but lets me configure things for the Handler #", "server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class TunServer: def __init__(self,", "## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile,", "# # object. (socketserver doesn't give Handlers any way to access the outer", "terminal client for Windows, # and is released under the \"MIT License Agreement\".", "except Exception as e: logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host, self.chain_port,", "(socketserver doesn't give Handlers any way to access the outer # # server", "while True: r, w, x = select.select([self.request, chan], [], []) if self.request in", "Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport):", "#def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is a little convoluted, but", "%r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from", "-> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w,", "= transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander)", "= 
remote_port # ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server", "if chan in r: data = chan.recv(1024) if len(data) == 0: break self.request.send(data)", "def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass", "LICENSE # file that should have been included as part of this package.", "#print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer)", "Client - X2Go terminal client for Windows, # and is released under the", "== 0: break chan.send(data) if chan in r: data = chan.recv(1024) if len(data)", "# this is a little convoluted, but lets me configure things for the", "True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if", "len(data) == 0: break chan.send(data) if chan in r: data = chan.recv(1024) if", "[]) if self.request in r: data = self.request.recv(1024) if len(data) == 0: break", "None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ##", "request to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if chan is", "for the Handler # # object. (socketserver doesn't give Handlers any way to", "by <NAME> <<EMAIL>>. # All rights reserved. # This file is part of", "as part of this package. import select import threading import socketserver import logging", "server normally.) 
# class SubHander(Handler): # chain_host = remote_host # chain_port = remote_port", "part of the Intsa Term Client - X2Go terminal client for Windows, #", "is None: logging.debug(\"Incoming request to %s:%d was rejected by the SSH server.\" %", "threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class TunServer: def __init__(self, local_port, remote_host,", "self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads =", "= ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon", "License Agreement\". Please see the LICENSE # file that should have been included", "class TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host", "[], []) if self.request in r: data = self.request.recv(1024) if len(data) == 0:", "if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,))", "self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки", "closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass def stop(self): self.server.shutdown() self.server_thread.join(5)", "self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is a little convoluted,", "self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is a little", "lets me configure things for the Handler # # object. 
(socketserver doesn't give", "select.select([self.request, chan], [], []) if self.request in r: data = self.request.recv(1024) if len(data)", "self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host,", "socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port),", "object. (socketserver doesn't give Handlers any way to access the outer # #", "%s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming", "chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to", "in r: data = chan.recv(1024) if len(data) == 0: break self.request.send(data) except Exception:", "len(data) == 0: break self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close() self.request.close()", "True: r, w, x = select.select([self.request, chan], [], []) if self.request in r:", "pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer)", "= None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True", "ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port),", "_onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self,", "веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, 
self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate()", "= socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки от", "to access the outer # # server normally.) # class SubHander(Handler): # chain_host", "% (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r -> %r -> %r\" %", "chan.send(data) if chan in r: data = chan.recv(1024) if len(data) == 0: break", "is part of the Intsa Term Client - X2Go terminal client for Windows,", "repr(e))) return if chan is None: logging.debug(\"Incoming request to %s:%d was rejected by", "logging.debug(\"Incoming request to %s:%d was rejected by the SSH server.\" % (self.chain_host, self.chain_port))", "Please see the LICENSE # file that should have been included as part", "server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r -> %r -> %r\"", "(self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r -> %r -> %r\" % (self.request.getpeername(),", "transport): # # this is a little convoluted, but lets me configure things", "self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\",", "to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if chan is None:", "SubHander(Handler): # chain_host = remote_host # chain_port = remote_port # ssh_transport = transport", "None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads", "2020 by <NAME> <<EMAIL>>. # All rights reserved. # This file is part", "# server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host", "released under the \"MIT License Agreement\". 
Please see the LICENSE # file that", "self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket =", "= select.select([self.request, chan], [], []) if self.request in r: data = self.request.recv(1024) if", "# # server normally.) # class SubHander(Handler): # chain_host = remote_host # chain_port", "Windows, # and is released under the \"MIT License Agreement\". Please see the", "True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun", "self.chain_port)) return logging.debug(\"Connected! Tunnel open %r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(),", "#self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def", "All rights reserved. # This file is part of the Intsa Term Client", "= socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread =", "from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass def stop(self): self.server.shutdown() self.server_thread.join(5) self.server.server_close()", "local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport", "True # server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler):", "give Handlers any way to access the outer # # server normally.) #", "normally.) 
# class SubHander(Handler): # chain_host = remote_host # chain_port = remote_port #", "logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if chan", "(self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x = select.select([self.request, chan], [],", "rejected by the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r", "the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r -> %r", "data = chan.recv(1024) if len(data) == 0: break self.request.send(data) except Exception: pass peername", "<reponame>mrlinqu/intsa_term_client # Copyright 2020 by <NAME> <<EMAIL>>. # All rights reserved. # This", "break chan.send(data) if chan in r: data = chan.recv(1024) if len(data) == 0:", "remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect = None", "was rejected by the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open", "# This file is part of the Intsa Term Client - X2Go terminal", "def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e:", "%r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while", "0: break chan.send(data) if chan in r: data = chan.recv(1024) if len(data) ==", "ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon =", "been included as part of this package. 
import select import threading import socketserver", "convoluted, but lets me configure things for the Handler # # object. (socketserver", "%s:%d was rejected by the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel", "ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start()", "This file is part of the Intsa Term Client - X2Go terminal client", "%s\" % (self.chain_host, self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming request to", "see the LICENSE # file that should have been included as part of", "included as part of this package. import select import threading import socketserver import", "-> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True:", "# server normally.) # class SubHander(Handler): # chain_host = remote_host # chain_port =", "= True self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type)", "of the Intsa Term Client - X2Go terminal client for Windows, # and", "that should have been included as part of this package. import select import", "way to access the outer # # server normally.) # class SubHander(Handler): #", "logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except", "as e: logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return", "but lets me configure things for the Handler # # object. 
(socketserver doesn't", "= remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect =", "= self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect = None self.server =", "select import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan", "threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\",", "= remote_host # chain_port = remote_port # ssh_transport = transport # # #ForwardServer((\"\",", "= threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self, peer):", "#tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread", "% (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x =", "onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port),", "doesn't give Handlers any way to access the outer # # server normally.)", "%r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x", "= threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class TunServer: def __init__(self, local_port,", "class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), 
self.request.getpeername(),) except Exception", "should have been included as part of this package. import select import threading", "Intsa Term Client - X2Go terminal client for Windows, # and is released", "the \"MIT License Agreement\". Please see the LICENSE # file that should have", "part of this package. import select import threading import socketserver import logging class", "socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target", "self.server_thread = threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self,", "self.request.recv(1024) if len(data) == 0: break chan.send(data) if chan in r: data =", "return logging.debug(\"Connected! Tunnel open %r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host,", "SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket =", "%r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r,", "None: logging.debug(\"Incoming request to %s:%d was rejected by the SSH server.\" % (self.chain_host,", "self.request in r: data = self.request.recv(1024) if len(data) == 0: break chan.send(data) if", "self.onConnect = None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address =", "# # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) # server_thread", "#ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) # server_thread = 
threading.Thread(target=server.serve_forever)", "file is part of the Intsa Term Client - X2Go terminal client for", "= self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to %s:%d", "remote_port # ssh_transport = transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server =", "self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is", "request to %s:%d was rejected by the SSH server.\" % (self.chain_host, self.chain_port)) return", "# chain_host = remote_host # chain_port = remote_port # ssh_transport = transport #", "package. import select import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self):", "SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon", "start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass", "self._onDisconnect self.onConnect = None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address", "this package. 
import select import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def", "handle(self): try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming", "chan is None: logging.debug(\"Incoming request to %s:%d was rejected by the SSH server.\"", "- X2Go terminal client for Windows, # and is released under the \"MIT", "= chan.recv(1024) if len(data) == 0: break self.request.send(data) except Exception: pass peername =", "= True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket,", "= self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open", "= self._onDisconnect self.onConnect = None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander)", "e: logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if", "# and is released under the \"MIT License Agreement\". Please see the LICENSE", "x = select.select([self.request, chan], [], []) if self.request in r: data = self.request.recv(1024)", "(self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x = select.select([self.request,", "open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed", "by the SSH server.\" % (self.chain_host, self.chain_port)) return logging.debug(\"Connected! Tunnel open %r ->", "if len(data) == 0: break self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close()", "Handler # # object. 
(socketserver doesn't give Handlers any way to access the", "Copyright 2020 by <NAME> <<EMAIL>>. # All rights reserved. # This file is", "the LICENSE # file that should have been included as part of this", "= ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True #", "r, w, x = select.select([self.request, chan], [], []) if self.request in r: data", "# object. (socketserver doesn't give Handlers any way to access the outer #", "local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon = True # server_thread.start() class", "chan.recv(1024) if len(data) == 0: break self.request.send(data) except Exception: pass peername = self.request.getpeername()", "= transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect =", "self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to %s:%d failed: %s\" % (self.chain_host,", "little convoluted, but lets me configure things for the Handler # # object.", "self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect:", "local_port), SubHander) self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket", "ssh_transport = transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect = None self.onDisconnect", "self.server.allow_reuse_address = True self.server.daemon_threads = True ## остатки от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family,", "== 0: break self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername)", "any way to access the outer # # server normally.) 
# class SubHander(Handler):", "% (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\"", "try: chan = self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request", "transport # # #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) #", "self.chain_port),)) self.onConnect(self.request.getpeername()) try: while True: r, w, x = select.select([self.request, chan], [], [])", "a little convoluted, but lets me configure things for the Handler # #", "me configure things for the Handler # # object. (socketserver doesn't give Handlers", "server_thread.daemon = True # server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port, transport):", "# # this is a little convoluted, but lets me configure things for", "= self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this", "от веб-сервера #tcp_socket = socket.socket(self.httpd.address_family, self.httpd.socket_type) #self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind()", "failed: %s\" % (self.chain_host, self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming request", "TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port", "pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): #", "ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is a little convoluted, but lets", "rights reserved. 
# This file is part of the Intsa Term Client -", "X2Go terminal client for Windows, # and is released under the \"MIT License", "remote_port, transport): # # this is a little convoluted, but lets me configure", "Handlers any way to access the outer # # server normally.) # class", "logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun:", "chan in r: data = chan.recv(1024) if len(data) == 0: break self.request.send(data) except", "chan], [], []) if self.request in r: data = self.request.recv(1024) if len(data) ==", "0: break self.request.send(data) except Exception: pass peername = self.request.getpeername() chan.close() self.request.close() self.onDisconnect(peername) #def", "# Copyright 2020 by <NAME> <<EMAIL>>. # All rights reserved. # This file", "r: data = self.request.recv(1024) if len(data) == 0: break chan.send(data) if chan in", "remote_port, transport): class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport = transport", "import threading import socketserver import logging class Handler(socketserver.BaseRequestHandler): def handle(self): try: chan =", "r: data = chan.recv(1024) if len(data) == 0: break self.request.send(data) except Exception: pass", "_onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if self.onDisconnect: self.onDisconnect(peer) pass def", "threading.Thread(target = self.server.serve_forever) #self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun:", "Term Client - X2Go terminal client for Windows, # and is released under", "have been included as part of this package. 
import select import threading import", "class SubHander(Handler): # chain_host = remote_host # chain_port = remote_port # ssh_transport =", "#self.httpd.socket = ssl.wrap_socket(tcp_socket, self.config.privkeyfile, self.config.pubkeyfile, True) #self.httpd.server_bind() #self.httpd.server_activate() self.server_thread = threading.Thread(target = self.server.serve_forever)", "server_thread.start() class TunServer: def __init__(self, local_port, remote_host, remote_port, transport): class SubHander(Handler): chain_host =", "chan.close() self.request.close() self.onDisconnect(peername) #def ssh_forward_tunnel(local_port, remote_host, remote_port, transport): # # this is a", "self.ssh_transport.open_channel(\"direct-tcpip\", (self.chain_host, self.chain_port), self.request.getpeername(),) except Exception as e: logging.debug(\"Incoming request to %s:%d failed:", "in r: data = self.request.recv(1024) if len(data) == 0: break chan.send(data) if chan", "# server = ForwardServer((\"\", local_port), SubHander) # server_thread = threading.Thread(target=server.serve_forever) # server_thread.daemon =", "chain_port = remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect = self._onDisconnect self.onConnect", "<NAME> <<EMAIL>>. # All rights reserved. # This file is part of the", "self.chain_port, repr(e))) return if chan is None: logging.debug(\"Incoming request to %s:%d was rejected", "self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer): logging.debug(\"Tun: closed from %r\" % (peer,)) if", "#self.server_thread.daemon = True self.server_thread.start() #print('tun start') def _onConnect(self, peer): logging.debug(\"Tun: open %r\" %", "logging.debug(\"Connected! 
Tunnel open %r -> %r -> %r\" % (self.request.getpeername(), chan.getpeername(), (self.chain_host, self.chain_port),))", "peer): logging.debug(\"Tun: open %r\" % (peer,)) if self.onConnect: self.onConnect(peer) pass def _onDisconnect(self, peer):", "= None self.onDisconnect = None self.server = socketserver.ThreadingTCPServer((\"\", local_port), SubHander) self.server.allow_reuse_address = True", "# #ForwardServer((\"\", local_port), SubHander).serve_forever() # server = ForwardServer((\"\", local_port), SubHander) # server_thread =", "chain_host = remote_host chain_port = remote_port ssh_transport = transport onConnect = self._onConnect onDisconnect" ]
[ "belongs to. A user will get all permissions granted to each of their", "blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Permission\", verbose_name=\"user permissions\", ), ),", "dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\",", "each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\",", "to. A user will get all permissions granted to each of their groups.\",", "user will get all permissions granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\",", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"),", "get all permissions granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\",", "# Generated by Django 3.2 on 2022-01-20 21:39 from django.db import migrations, models", "on 2022-01-20 21:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "[ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField(", "operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs", "name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", 
related_query_name=\"user\", to=\"auth.Permission\", verbose_name=\"user permissions\",", "class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [", "Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField(", "21:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"),", "2022-01-20 21:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\",", "field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to. A user will get all", "to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\",", "= [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to.", "groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific", "help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Permission\", verbose_name=\"user permissions\", ), ), ]", "(\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups", "), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, 
help_text=\"Specific permissions for this user.\", related_name=\"user_set\",", "by Django 3.2 on 2022-01-20 21:39 from django.db import migrations, models class Migration(migrations.Migration):", "this user belongs to. A user will get all permissions granted to each", "model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Permission\", verbose_name=\"user", "A user will get all permissions granted to each of their groups.\", related_name=\"user_set\",", "to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\",", "migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations", "migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to. A user", "groups this user belongs to. A user will get all permissions granted to", "name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to. 
A user will get", "their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True,", "granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField(", "related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for", "blank=True, help_text=\"The groups this user belongs to. A user will get all permissions", "\"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this", "will get all permissions granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\",", "3.2 on 2022-01-20 21:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Permission\",", "models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations =", "field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Permission\", verbose_name=\"user permissions\", ),", "Generated by Django 3.2 on 2022-01-20 21:39 from django.db import migrations, models class", "model_name=\"businessowner\", 
name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to. A user will", "verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\",", "] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user", "help_text=\"The groups this user belongs to. A user will get all permissions granted", "), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions for this user.\", related_name=\"user_set\", related_query_name=\"user\",", "user belongs to. A user will get all permissions granted to each of", "permissions granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ),", "of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField(", "related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ), ), migrations.AddField( model_name=\"businessowner\", name=\"user_permissions\", field=models.ManyToManyField( blank=True, help_text=\"Specific permissions", "[ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The groups this user belongs to. 
A", "import migrations, models class Migration(migrations.Migration): dependencies = [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ]", "\"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True, help_text=\"The", "Django 3.2 on 2022-01-20 21:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "(\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\", field=models.ManyToManyField( blank=True,", "all permissions granted to each of their groups.\", related_name=\"user_set\", related_query_name=\"user\", to=\"auth.Group\", verbose_name=\"groups\", ),", "= [ (\"auth\", \"0012_alter_user_first_name_max_length\"), (\"auth_app\", \"0002_businessowner_is_superuser\"), ] operations = [ migrations.AddField( model_name=\"businessowner\", name=\"groups\"," ]
[ "from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile:", "EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile)", "-> dict: with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile) return config def", "dict: with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile) return config def test_version():", "from dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with", "_read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile) return config", "import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\")", "import csv import yaml from dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def", "import yaml from dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) ->", "dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with open(config_path,", "__version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\") as", "as ymlfile: config = yaml.safe_load(ymlfile) return config def test_version(): assert __version__ == '0.1.5'", "\"r\") as ymlfile: config = yaml.safe_load(ymlfile) return config def test_version(): assert __version__ ==", "dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile: config", "open(config_path, \"r\") as 
ymlfile: config = yaml.safe_load(ymlfile) return config def test_version(): assert __version__", "with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile) return config def test_version(): assert", "csv import yaml from dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path)", "import EventHandler def _read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile: config =", "def _read_config(config_path) -> dict: with open(config_path, \"r\") as ymlfile: config = yaml.safe_load(ymlfile) return", "yaml from dixday_predictions import __version__ from dixday_predictions.eventhandler.EventHandler import EventHandler def _read_config(config_path) -> dict:" ]
[ "def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)',", "@staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database =", "conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return db_type,", "<filename>helpers/db_helpers.py import os import re env = os.environ class DbHelpers(object): \"\"\" Database helpers", "class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str", "env = os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def", ":author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password,", "= os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string,", "import re env = os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\"", "user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return db_type, user, password, host, database", "Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type,", "env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return db_type, user, password,", "): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return", "re env = os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> 
\"\"\" @staticmethod", "helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user,", "\"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database", "= env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return db_type, user,", "db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups() return db_type, user, password, host,", "os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ):", "<EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host,", "\"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL']", "os import re env = os.environ class DbHelpers(object): \"\"\" Database helpers :author: <EMAIL>", "DbHelpers(object): \"\"\" Database helpers :author: <EMAIL> \"\"\" @staticmethod def __get_connection_parts__(connection_string, ): conn_str =", "import os import re env = os.environ class DbHelpers(object): \"\"\" Database helpers :author:", "__get_connection_parts__(connection_string, ): conn_str = env['CLEARDB_DATABASE_URL'] db_type, user, password, host, database = re.match('(.*?)://(.*?):(.*?)@(.*?)/(.*)', conn_str).groups()" ]
[ "= MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot,", "len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG", "using a simple LL parser ---- # Consts for token classification EOF =", "don't have an unfortunate ordering. # Outstanding is a set of all the", "covered by initial test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args()", "the remainder of the test ## obligations. ## ## NOTE: Currently considering only", "to, procurement of substitute goods or services; loss of use, data, or profits;", "in output by suppressing them. # (Note they may still participate in excludes.)", "Is a given (slot,value) pair compatible with the test case so far? #", "((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1 ] testcase[ s1 ] =", "reader: if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec))", "of this software, even if advised of the possibility of such damage. 
\"\"\"", "error message concerning a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest)", "# # Is a given (slot,value) pair compatible with the test case so", "consider any test case with more than one ## single or error value", "procurement of substitute goods or services; loss of use, data, or profits; or", "MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind", "single obligation: \", slot, value, kind) testcase[slot] = value if completeCase( columnOrder, testcase", "parm = CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for", "== \"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif kind == \"error\" or", ": Singles.append( (val, slotNum, kind) ) singleton = True else : print_(\"*ERR* Unrecognized", "= s.find(\"//\"); if commentPos >= 0 : s = s[0:commentPos] for word in", "return tuple[1] # --------------- Build initial data structures ---- # Single columns are", "slot, val, conflict_slot, cs_value)) # Excludes that come from \"if\" clauses --- reverse", "Map (slot,value) pair to list of condition names ValueIfs = [ ] #", "if completeCase( columnOrder, testcase ) : Suite.append( testcase ) else: CaseMessage( \"Warning -", "testcase ) # Score the # Note one (but not both) of these", "EOF return commentPos = s.find(\"//\"); if commentPos >= 0 : s = s[0:commentPos]", "+ conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) ==", "if cond not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val,", "col , \" with \", testcase) return False # ------------------------------------------------------------ # Print Warnings", "only categories with more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\",", "values) slotNum = len(CategoriesList) CategoriesList.append( 
category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[", "= ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------ ##", "dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass( Token ) == ValueToken :", "\"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test suites.", "ExceptToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"except\"", "IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken", "be handled ## by the application, and we assume special case processing ##", ") : for ob in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2,", "is a pair (item, item), that is, ((slot, value), (slot, value)) # An", "v1, s2, v2 ): return ((s1, v1), (s2, v2)) def reversePair( pair ):", "# Obligations depend on excludes, so call makeExcludes before # calling makeObligations #", "pair (the two items must occur together in some case) # An exclusion", "s1 ] = old_v1 testcase[ s2 ] = old_v2 ## If we couldn't", "back \", s1, s2) # Restore previous values testcase[ s1 ] = old_v1", "def reversePair( pair ): return ( pair[1], pair[0] ) # Each item in", "(slotNum, val) ] = [] ## List of its properties for cond in", "not limited to, procurement of substitute goods or services; loss of use, data,", "# We will record obligations in three different data structures, # for different", "case, we remove obligations from # the outstanding obligations list. The other lists", "an outstanding obligation. 
# Dec 2006 --- Let's look at all the outstanding", "= len(keys) for i in range(nslots): ObsByCol[i] = [] for i in MultipleColumns", "dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop() else:", "file=dest) sep=\"\" for col in range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\",", "print_(\"\") for slot in columns : parm = CategoriesList[ slot ] print_(\"%15s\" %", "and the inner loops try to fulfill # as many test obligations as", ") obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for", "dbg(\"#DBG (parsed value: \", val, \")\") values.append( val ) return values def parseValue():", "reading and writing test suites ## Constants (other than tokens for parsing) DontCare", "= valDesc[0] ## The name of the value itself ## Postpone marking val", "(false) DBGp = False ## Performance debugging, December 2006 maxCandidates = 50 ##", "tokenClass( Token ) == ExceptToken : Token = six.next(tokenStream) condname = Token Token", ": return False for tslot in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot]))", "): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s1,v1))", "genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp # To read the", "cases) ## * Not consider any pairs as being satisfied by a single", "parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token == EOF : return [ ]", "directions SingleColumns = [ ] # Columns with just one (non-error, non-single) choice", "print_(\"Warning: No non-singular value choices for \", CategoriesList[slot], \"; Pairs generation will fail.\")", "possible: \", testcase ) def CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle(", ", testcase ): return True else: dbg_p(\"#DBG *** Rolling back \", s1, s2)", "of pairs generation, and # we can save space in output by suppressing", "Log = logging.getLogger(__name__) # Debug 
messages def dbg(*msg): parts = [ str(x) for", "PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val, slotNum, condVal", "The CategoriesList can also be considered the test case schema CategoriesValues = [", "\"not in specification\") in_schema_map.append(-1) for vec in reader: if len(vec) == len(in_schema) :", "in Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[", "dest=\"varying\", help=\"\"\"Include only categories with more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\",", "for suite in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs : print_(\"=== Pairs", "= colObs[ len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1],", "True else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton: vlist.append( val", "Token ) == IfToken : Token = six.next(tokenStream) ifcond = Token Token =", "other lists are # cleared lazily, when we bring up an obligation. #", "j in range(i+1,nslots) : ## if j in SingleColumns: continue ## ## ---", "one direction ObsByCol = {} # Per column, both directions SingleColumns = [", "testcase ) # --------------------------------------------------------- # # Is a given (slot,value) pair compatible with", "*** Trying any value, regardless of obligation\") for val in CategoriesValues[ col ]", "suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions)", "Singles = [] ## List of (slot,value,kind) where kind is \"single\" or \"error\"", "+ \":\", len(Suite), \" test vectors\") print_(\"\") for slot in columns : parm", "fill this DontCare with something useful? 
# Let's try for an outstanding obligation.", "EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken", "Outstanding): # Here is our lazy deletion of obligations; we # clip from", "for cand in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1", "EOF : return EOFToken if tok.endswith(\":\") : return CategoryToken if tok == \"if\"", "True # --------------------------------------------------------- def MakeTuple ( len ): newList = [] for i", "will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend", "word in s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) ) )", "Outstanding is a set of all the obligations still outstanding. # ObsByCol is", "import random ## for shuffling lists import csv ## for reading and writing", "read a partial suite of test cases (tests.txt) in CSV format, # plus", "human-readable # format: python genpairs.py < foo.cp # To read a partial suite", "suppressing them. # (Note they may still participate in excludes.) 
# # We'll", "\"\"\" usage = \"\"\"Usage: # To read a specification (foo.cp) and print the", "# List of (value, slot, condition) triples ## What we build Suite =", "will be handled ## by the application, and we assume special case processing", "default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\",", "(score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1 ] testcase[ s1 ]", ") : \"\"\"Print a warning or error message concerning a particular partially-defined test", "and an itempair is a tuple # # Like AETG and several other", "for slot in columns : parm = CategoriesList[ slot ] print_(\"%15s\" % parm", "dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ]", "v2 ): return ((s1, v1), (s2, v2)) def reversePair( pair ): return (", "* Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop() else: if", "Platform compatibility # ---------------------------------------- import six # Python 2 and 3 compatibility from", "completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \" in \", testcase)", "len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if", "the initial test suite is not ## a suite of special and error", "* Attempting to complete\", testcase ) col = columnOrder[0] if testcase[col] != DontCare:", "exclusion) ----- def makePair( s1, v1, s2, v2 ): return ((s1, v1), (s2,", "optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\",", "\"Warning - No pair possible: \", testcase ) def completeCase( columnOrder, testcase )", "\"\"\"Usage: # To read a specification (foo.cp) and print the test vector in", "commentPos >= 0 : s = s[0:commentPos] for word in s.split() : dbg(\"#DBG", "[ ] obindex = 0 while 
obindex < len(colObs) and len(candidates) < maxCandidates", "modification, are permitted provided that the following conditions are met: * Redistributions of", "a pair (the two items must not occur together in any case) #", "more than one ## single or error value (we don't know which will", "= ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1,", "* Skipping column \", col, \" (already filled in)\") return completeCase( columnOrder[1:], testcase", "optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and", "\"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more than one non-error and", "cs_value)) def makeObligations() : if DBG: print_(\"--- Creating obligations list ---\") keys =", "(in order given) ## The CategoriesList can also be considered the test case", "this DontCare with something useful? # Let's try for an outstanding obligation. #", "fitness for a particular purpose are disclaimed. In no event shall the copyright", "Performance debugging, December 2006 maxCandidates = 50 ## Bigger = better solutions, smaller", "\"error\" Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [", "tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1] Token = six.next(tokenStream) values =", "test case, we remove obligations from # the outstanding obligations list. The other", "## obligations. 
## ## NOTE: Currently considering only pair obligations, ## not singletons.", "The form of a pair (obligation or exclusion) ----- def makePair( s1, v1,", "# Single columns are those in which all but one value is #", "or error value (we don't know which will be handled ## by the", "slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite :", "s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" %", "in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \" in", "CategoriesValues[ conflict_slot ] : if cond not in ValueProps[ (conflict_slot, cs_value) ] :", "lists are # cleared lazily, when we bring up an obligation. # def", "if tok == \"single\" : return SingleToken if tok == \"error\" : return", "with or without modification, are permitted provided that the following conditions are met:", "] = [ ] for valDesc in values : val = valDesc[0] ##", "if obforward not in Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[", "## Postpone marking val as a possible value of the property ## until", "structures, # for different forms of quick access: # ObsList is a list", "An exclusion is a pair (the two items must not occur together in", "can save some time by # always fixing these at the beginning of", "value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value", "express or implied warranties, including, but not limited to, the implied warranties of", "--csv --initial-suite tests.txt < foo.cp \"\"\" # # An item is a pair", "reproduce the above copyright notice, this list of conditions and the following disclaimer", "of obligations; we # clip from the end of the list dbg_p(\"#DBG *", "same as above, and then produce a test suite that # covers the", "or business interruption) however caused and on any theory of liability, whether in", "# An item is a tuple, 
and an itempair is a tuple #", "LL parser ---- # Consts for token classification EOF = \"<EOF>\" CategoryToken =", "cases, and the inner loops try to fulfill # as many test obligations", ": return ErrorToken return ValueToken # Generator to produce tokens, one by one", "ValueProps[ (slotNum, val) ] = [] ## List of its properties for cond", "reversePair( pair ): return ( pair[1], pair[0] ) # Each item in the", "def compatible( item, testcase ) : slot, val = item if ( testcase[", "=== Attempting tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder)", "= value if completeCase( columnOrder, testcase ) : Suite.append( testcase ) else: CaseMessage(", "them. # (Note they may still participate in excludes.) # # We'll identify", "value), (slot, value)) # An obligation is a pair (the two items must", "for debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 =", "= [ ] ## List of value sets Singles = [] ## List", "commentPos = s.find(\"//\"); if commentPos >= 0 : s = s[0:commentPos] for word", "condVal = valOf(cond) if kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum,", "(but not both) of these may coincide with # an existing element. We'll", "plus a test specification, and report which pairs of values have not #", "Read an initial test suite (or several), and ## eliminate those obligations, so", ": value = value + 1 candidates.append( (value, ob) ) obindex = obindex", "ObsList.pop() s1, v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList)", "Outstanding : value = value + 1 if testcase[s2] != v2 : for", "first, and ## * Not consider any test case with more than one", "bring up an obligation. 
# def clearObligations(testcase) : testCaseValue = 0 for i", "binary forms, with or without modification, are permitted provided that the following conditions", "several other covering array generators, the outer # loop will generate test cases,", "print_(\"--- Creating obligations list ---\") keys = CategoriesList nslots = len(keys) for i", "i in range(nslots): ObsByCol[i] = [] for i in MultipleColumns : for v1", "simple LL parser ---- # Consts for token classification EOF = \"<EOF>\" CategoryToken", "t ) csv_writer.writerow( t ) # ---------------- ## Read an initial test suite", "list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop()", "in SingleColumns: continue ## ## --- short cut doesn't work if only one", "(val, slotNum, kind) ) singleton = True else : print_(\"*ERR* Unrecognized condition attribute:\",", "are met: * Redistributions of source code must retain the above copyright notice,", ": if tok == EOF : return EOFToken if tok.endswith(\":\") : return CategoryToken", "[] ## List of (slot,value,kind) where kind is \"single\" or \"error\" Excludes =", "the possibility of such damage. \"\"\" usage = \"\"\"Usage: # To read a", "copyright owner or contributors be liable for any direct, indirect, incidental, special, exemplary,", "genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" # # An item is a", "return [(\"error\", None )] + parseConditions() if tokenClass( Token ) == SingleToken :", "damages (including, but not limited to, procurement of substitute goods or services; loss", "dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken : Token = six.next(tokenStream) return", "= ExceptCond for conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[", "three different data structures, # for different forms of quick access: # ObsList", "prior written permission. 
This software is provided by the copyright holders and contributors", "limited to, the implied warranties of merchantability and fitness for a particular purpose", "a list (array) with n columns # # Representations: # A test case", "value, regardless of obligation\") for val in CategoriesValues[ col ] : if compatible((col,val),", "structures: # We will record obligations in three different data structures, # for", "Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \",", "dbg(\"#DBG (parseValue, looking at \", Token, \")\") if tokenClass( Token ) != ValueToken", "\", testcase ) def completeCase( columnOrder, testcase ) : if len (columnOrder) ==", "Print Warnings (to stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector,", "# the outstanding obligations list. The other lists are # cleared lazily, when", "copyright holders and contributors \"as is\" and any express or implied warranties, including,", "value = t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\" ) print_( \"\"", "col = in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning:", "and <NAME> # All rights reserved. # License = \"\"\" (C) 2007,2017 University", "UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [", "= \" \".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg)", "looking at \", Token, \")\") if tokenClass( Token ) != ValueToken : print_(\"Syntax", "When we complete a test case, we remove obligations from # the outstanding", "newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation not in Outstanding:", "pairs as being satisfied by a single ## or error case. 
## For", "quick access: # ObsList is a list of obligations, some of which may", "test case so far? # def compatible( item, testcase ) : slot, val", "if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in", "a pair (obligation or exclusion) ----- def makePair( s1, v1, s2, v2 ):", "format: python genpairs.py < foo.cp # To read a partial suite of test", "assume that the initial test suite is not ## a suite of special", "in the documentation and/or other materials provided with the distribution. * Neither the", "if tok == \"error\" : return ErrorToken return ValueToken # Generator to produce", "\", vec) # ---------------- ## Print the set of outstanding obligations. Typical use", "parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return value + conditions def parseConditions():", "print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [ ] ##", "for \", CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1 :", "for different forms of quick access: # ObsList is a list of obligations,", "\"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce test cases covering 'single'", "if only one varying column -- for v2 in CategoriesValues[j] : j_item =", "= [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format). 
Often used together", "execute, print -- parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear(", "old_v2 ## If we couldn't score any more obligations, can we at least", "Token, \")\") if tokenClass( Token ) != ValueToken : print_(\"Syntax error, expecting value,", "= old_v1 testcase[ s2 ] = old_v2 ## If we couldn't score any", ") ) elif kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif", "csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect def initial_suite_clear( initial_suite ) :", "vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for valDesc", "2007 University of Oregon and <NAME> # All rights reserved. # License =", "excludes, so call makeExcludes before # calling makeObligations # def makeExcludes() : #", "): return tuple[0] def valOf( tuple ): return tuple[1] # --------------- Build initial", "no event shall the copyright owner or contributors be liable for any direct,", "liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but", "print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug", "(including, but not limited to, procurement of substitute goods or services; loss of", "if tok == EOF : return EOFToken if tok.endswith(\":\") : return CategoryToken if", "pair possible: \", testcase ) def CreateSingles(): for single in Singles: CreateSingle(single) def", "python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" # # An item is", "SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder", "= single dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot] = value if", "category = Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category,", "values 
testcase[ s1 ] = old_v1 testcase[ s2 ] = old_v2 ## If", "testcase[slot] != val) : return False for tslot in range(len(testcase)) : if ((slot,", "obligations; we # clip from the end of the list dbg_p(\"#DBG * Lazy", "try to fulfill # as many test obligations as possible with each test", "an obligation. # def clearObligations(testcase) : testCaseValue = 0 for i in range(", "not both) of these may coincide with # an existing element. We'll only", "\"if\" : ValueIfs.append( (val, slotNum, condVal ) ) elif kind == \"except\" :", "= [ ] # Columns with just one (non-error, non-single) choice MultipleColumns =", "test suite that # covers the missing pairs: python genpairs.py --csv --initial-suite tests.txt", "for token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken", "(UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\")", "if tokenClass( Token ) == EOF : print_(\"Discarding rest of file\") return [", "A test suite is a list of test cases # An item is", ": print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton: vlist.append( val ) parseSpec()", "about 9 minutes wall time on G4 laptop), so now we # set", "column \", col , \" with \", testcase) return False # ------------------------------------------------------------ #", "a spreadsheet or other CSV-consuming application. \"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer(", "is a pair (the two items must not occur together in any case)", "we are trying to see what is missing in an initial test suite.", "Oregon and <NAME> # All rights reserved. 
# License = \"\"\" (C) 2007,2017", "by # always fixing these at the beginning of pairs generation, and #", "mismatch in\", initial_suite) print_(\" Column \", i, \"'\" + col + \"'\", \"not", "reached>>\") yield EOF return commentPos = s.find(\"//\"); if commentPos >= 0 : s", "arising in any way out of the use of this software, even if", ", condname)] + parseConditions() dbg(\"#DBG No more conditions\") return [ ] # --------------", "# Note one (but not both) of these may coincide with # an", "UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True", "whether in contract, strict liability, or tort (including negligence or otherwise) arising in", "other spreadsheets, genpairs with the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\",", "are permitted provided that the following conditions are met: * Redistributions of source", "range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles : Suite = [ ] CreateSingles() PrintTable(", "suite (in csv format). Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\",", "s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ##", "list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG single obligation:", "for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular", "(C) 2007,2017 University of Oregon and <NAME>. All rights reserved. 
Redistribution and use", "v2 : for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding :", "# A test suite is a list of test cases # An item", "# ObsList is a list of obligations, some of which may # already", "Token ) == EOF : print_(\"Discarding rest of file\") return [ ] Token", "Neither the name of the University of Oregon nor the names of its", "names (in order given) ## The CategoriesList can also be considered the test", "50 ## Bigger = better solutions, smaller = faster ## Platform compatibility #", "complete\", testcase ) col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping", "len(Suite), \" test vectors\") print_(\"\") for slot in columns : parm = CategoriesList[", "global Token dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass( Token ) ==", "(suitable as input to Excel and other spreadsheets, genpairs with the -i option,", "## User arguments from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\",", "0 # ==len(CategoriesList), set after parsing ## Temporary, for building excludes PropsSlots =", "tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if (", "a list of test cases # An item is a tuple, and an", "Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ]", "and <NAME>. All rights reserved. Redistribution and use in source and binary forms,", "both) of these may coincide with # an existing element. We'll only consider", "Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"except\" , condname)]", "(slot,value,kind) where kind is \"single\" or \"error\" Excludes = set() ## Set of", "in_schema , \" but saw \", vec) # ---------------- ## Print the set", "Typical use is when ## we are trying to see what is missing", "provided with the distribution. 
* Neither the name of the University of Oregon", "shall we fill this DontCare with something useful? # Let's try for an", "- 1 ] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG ***", "with # an existing element. We'll only consider *added* value, # so we", "suite of special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to", "if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" ) else: PrintTable(", "to endorse or promote products derived from this software without specific prior written", "Postpone marking val as a possible value of the property ## until we", "materials provided with the distribution. * Neither the name of the University of", "False for tslot in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in Excludes:", "if tokenClass( Token ) == PropToken : Token = six.next(tokenStream) condname = Token", "] Token = tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1] Token =", "(columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG *", "ValueProps[ (slotNum, val ) ].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal] =", "[\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector)) : if vector[col] == DontCare", "and contributors \"as is\" and any express or implied warranties, including, but not", "input to Excel and other spreadsheets, genpairs with the -i option, and some", "obbackward = (j_item, i_item) if obforward not in Excludes and obbackward not in", "arguments from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a", "test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help", "in_schema = 
reader.next() in_schema_map = [ ] for i in range(len(in_schema)): col =", "valOf( tuple ): return tuple[1] # --------------- Build initial data structures ---- #", "v2 for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple", "(slot,value) pair to list of condition names ValueIfs = [ ] # List", "0 for i in range( len(testcase) ): for j in range ( i+1,", "import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def dbg(*msg): parts", "as being satisfied by a single ## or error case. ## For now,", ": for ob in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2", "len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes, so", "len(CategoriesList) CategoriesList.append( category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] =", "return ((s1, v1), (s2, v2)) def reversePair( pair ): return ( pair[1], pair[0]", ") vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for", ": schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row \"", "For at least meeting one obligation ((s1, v1), (s2, v2)) = ob if", "license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const", "= False ## Debugging mode, on (true) or off (false) DBGp = False", "CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple(", "To read a specification (foo.cp) and print the test vector in human-readable #", "clearObligations(testcase) : testCaseValue = 0 for i in range( len(testcase) ): for j", ") == ErrorToken : Token = six.next(tokenStream) return [(\"error\", None )] + parseConditions()", 
"print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\",", "print_(\"_\"*60) for t in Suite : for slot in columns : value =", "s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos", "the value itself ## Postpone marking val as a possible value of the", "col ] : if compatible((col,val), testcase) : testcase[ col ] = val if", "def MakeTuple ( len ): newList = [] for i in range(0,len): newList.append(DontCare)", "makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs : print_(\"===", "We can save some time by # always fixing these at the beginning", "only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug", "ValueExcepts : val, slot, cond = ExceptCond for conflict_slot in PropsSlots[ cond ]", "be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including,", "parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite : for slot in", "and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories", "\"as is\" and any express or implied warranties, including, but not limited to,", "((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding", "is a dictionary obligations by column, also updated lazily. # # Exclude is", "from this software without specific prior written permission. 
This software is provided by", "of use, data, or profits; or business interruption) however caused and on any", "if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\",", "return True else: dbg_p(\"#DBG *** Rolling back \", s1, s2) # Restore previous", "obligation ((s1, v1), (s2, v2)) = ob if testcase[s1] != v1 : for", "for a particular purpose are disclaimed. In no event shall the copyright owner", "at least meeting one obligation ((s1, v1), (s2, v2)) = ob if testcase[s1]", "\"error\" : return ErrorToken return ValueToken # Generator to produce tokens, one by", "((s1, v1), (s2, v2)) = ob if testcase[s1] != v1 : for ccol", "return ValueToken # Generator to produce tokens, one by one # def getToken()", "print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation to vector for debugging messages\"\"\"", "s2) # Restore previous values testcase[ s1 ] = old_v1 testcase[ s2 ]", "specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a warning", "is provided by the copyright holders and contributors \"as is\" and any express", "optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\",", ": while len(ObsList) > 0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise", ": val, slot, cond = ExceptCond for conflict_slot in PropsSlots[ cond ] :", "testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value,", "specification, and report which pairs of values have not # been covered: python", "CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ] for slot", "if 
((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in", "are trying to see what is missing in an initial test suite. ##", "!= DontCare: dbg_p(\"#DBG * Skipping column \", col, \" (already filled in)\") return", "return value + conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token", "ValueToken : val = parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append( val", "Not consider any pairs as being satisfied by a single ## or error", "can also be considered the test case schema CategoriesValues = [ ] ##", "col, \" (already filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns", "so call makeExcludes before # calling makeObligations # def makeExcludes() : # Excludes", "tok == \"error\" : return ErrorToken return ValueToken # Generator to produce tokens,", "just one (non-error, non-single) choice MultipleColumns = [ ] # Complement of SingleColumns", "terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const =", "any values ValueProps = { } # Map (slot,value) pair to list of", ": testcase[ col ] = val if completeCase( columnOrder[1:], testcase ): return True", "six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions()", "IfCond for conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot", "several), and ## eliminate those obligations, so we are creating ## a test", "(already filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder,", "\"\"\"Convert obligation to vector for debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1", "= [ ] while tokenClass( Token ) == ValueToken : val = parseValue()", "## ## NOTE: Currently considering only pair 
obligations, ## not singletons. We should", "} # Map (slot,value) pair to list of condition names ValueIfs = [", "(we don't know which will be handled ## by the application, and we", "testcase[ s2 ] = v2 if completeCase( columnOrder[1:] , testcase ): return True", "pair[0] ) # Each item in the pair is a <slot,value> or <name,value>", "= False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) ##", "CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" ) else:", "Token = six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return [(\"if\" , ifcond)]", "\" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation to vector for debugging", "other features, including other special cases) ## * Not consider any pairs as", "# already have been fulfilled (deletion is lazy). We may scramble # this", "produce tokens, one by one # def getToken() : while 1: s =", "cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help =", "is missing in an initial test suite. 
## def print_required_pairs( ) : for", "pair def slotOf( tuple ): return tuple[0] def nameOf( tuple ): return tuple[0]", "obligations, so we are creating ## a test suite to fill in the", "testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase ) # --------------------------------------------------------- # #", "business interruption) however caused and on any theory of liability, whether in contract,", "to special diagnostic options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options", "if ((slot, val), (tslot, testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in", "print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector)) : if vector[col]", "effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles)", "action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format). Often", "disclaimed. 
In no event shall the copyright owner or contributors be liable for", "one by one # def getToken() : while 1: s = sys.stdin.readline() if", ": Token = six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return [(\"if\" ,", "range(len(in_schema)): col = in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else:", "print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # --", "v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ # --", "record obligations in three different data structures, # for different forms of quick", "items to lists of item. # import sys ## for file handling import", "while tokenClass( Token ) != CategoryToken : if tokenClass( Token ) == EOF", "spec file using a simple LL parser ---- # Consts for token classification", "Token = six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if tokenClass( Token )", "may be used to endorse or promote products derived from this software without", "------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if", "= (i_item, j_item) obbackward = (j_item, i_item) if obforward not in Excludes and", ": for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value", "Let's try for an outstanding obligation. 
# Dec 2006 --- Let's look at", "s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) ) ) yield word", "tokenClass( Token ) == SingleToken : Token = six.next(tokenStream) return [(\"single\", None)] +", "if cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot,", "in Outstanding or reversePair(ob) in Outstanding): # Here is our lazy deletion of", "cs_value)) # Excludes that come from \"if\" clauses --- reverse sense for IfCond", "Attempting to complete\", testcase ) col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG", "i in range(len(in_schema)): col = in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col)", "name of the University of Oregon nor the names of its contributors may", "Main processing: Parse the script, execute, print -- parse() identifySingles() makeExcludes() makeObligations() for", "= list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG single", "and/or other materials provided with the distribution. 
* Neither the name of the", "testcase) : testcase[ col ] = val if completeCase( columnOrder[1:], testcase ): return", "CategoriesValues = [ ] ## List of value sets Singles = [] ##", "# We'll identify the multiples (non-single columns) as well, # because they are", "\" with \", testcase) return False # ------------------------------------------------------------ # Print Warnings (to stderr", "where kind is \"single\" or \"error\" Excludes = set() ## Set of ((slot,value),(slot,value))", "testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1", "(to stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr )", "): return ( pair[1], pair[0] ) # Each item in the pair is", "pairs are from these NCol = 0 # ==len(CategoriesList), set after parsing ##", "name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2))", "PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val,", "so we score the *new* parts only. 
value = 1 ## For at", "suite ) if UserOptions.pairs : print_(\"=== Pairs required for completion ===\" ) print_required_pairs()", "tokenClass( Token ) != CategoryToken : print_(\"Syntax error on \", Token, \" looking", "or \"error\" Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList =", "return [ ] # -------------- The form of a pair (obligation or exclusion)", "print -- parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite", "debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [ ] ## List", "the outer # loop will generate test cases, and the inner loops try", "pair (slot number, value) # An itempair is a pair (item, item), that", "item in the pair is a <slot,value> or <name,value> pair def slotOf( tuple", "may scramble # this list so we don't have an unfortunate ordering. #", "in \", testcase) # How shall we fill this DontCare with something useful?", "of the University of Oregon nor the names of its contributors may be", "used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not", "Token, \" looking for 'category:'\") print_(\"Skipping to next category\") ## Error recovery to", "coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles", "Excludes that come from \"except\" clauses for ExceptCond in ValueExcepts : val, slot,", "highest score. This is fairly expensive # (10^20 takes about 9 minutes wall", "test suite to fill in the remainder of the test ## obligations. 
##", ">= 0 : s = s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s:", "is \"single\" or \"error\" Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric)", "of (value, slot, condition) triples ## What we build Suite = [ ]", "of which may # already have been fulfilled (deletion is lazy). We may", "\".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------", "must not occur together in any case) # A case is a list", "diagnostic options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect:", "col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite)", "print_(\"*** Warning, format mismatch with initial suite \", initial_suite) print_(\"*** Expecting columns \",", "skipinitialspace=True ## Seems to have no effect def initial_suite_clear( initial_suite ) : matches", "suite (or several), and ## eliminate those obligations, so we are creating ##", "] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value + conditions)", "Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\")", "ObsByCol = {} # Per column, both directions SingleColumns = [ ] #", "(or several), and ## eliminate those obligations, so we are creating ## a", "recovery to next category while tokenClass( Token ) != CategoryToken : if tokenClass(", ": dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG * Attempting to complete\",", "DontCare dbg_p(\"#DBG ** Failing to fill column \", col , \" with \",", "columns : parm = CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\")", "saw \", vec) # ---------------- ## Print the set of outstanding obligations. 
Typical", "or reversePair(ob) in Outstanding): # Here is our lazy deletion of obligations; we", "what is missing in an initial test suite. ## def print_required_pairs( ) :", "testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder = list(range(", "obligations as possible with each test case. # # Data structures: # We", "cond) if not singleton: vlist.append( val ) parseSpec() def parseValues(): global Token dbg(\"#DBG", "or error message concerning a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\",", "in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append(", "[], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format). Often used together with", "SingleColumns -- pairs are from these NCol = 0 # ==len(CategoriesList), set after", "Redistributions in binary form must reproduce the above copyright notice, this list of", "obligations by column, also updated lazily. # # Exclude is a dictionary mapping", "columns \", columnOrder, \" in \", testcase) # How shall we fill this", "score. This is fairly expensive # (10^20 takes about 9 minutes wall time", "even if advised of the possibility of such damage. 
\"\"\" usage = \"\"\"Usage:", "\"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values,", "Print vectors as comma-separated values, for import into a spreadsheet or other CSV-consuming", "category while tokenClass( Token ) != CategoryToken : if tokenClass( Token ) ==", "CategoriesList nslots = len(keys) for i in range(nslots): ObsByCol[i] = [] for i", "miss other features, including other special cases) ## * Not consider any pairs", "on \", Token, \" looking for 'category:'\") print_(\"Skipping to next category\") ## Error", "(Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if", "meeting one obligation ((s1, v1), (s2, v2)) = ob if testcase[s1] != v1", "= { } # For each category, all props on any values ValueProps", "the beginning of pairs generation, and # we can save space in output", "(Note they may still participate in excludes.) # # We'll identify the multiples", "import six # Python 2 and 3 compatibility from six import print_ ##", "use is when ## we are trying to see what is missing in", ") ].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif", "= [] for i in MultipleColumns : for v1 in CategoriesValues[i] : i_item", "## --- short cut doesn't work if only one varying column -- for", "provided that the following conditions are met: * Redistributions of source code must", "caused and on any theory of liability, whether in contract, strict liability, or", "far? 
# def compatible( item, testcase ) : slot, val = item if", "CSV format, # plus a test specification, and report which pairs of values", "which will be handled ## by the application, and we assume special case", "# # An item is a pair (slot number, value) # An itempair", "sys ## for file handling import random ## for shuffling lists import csv", "parameters DBG = False ## Debugging mode, on (true) or off (false) DBGp", "All obligations, but only one direction Outstanding = set() # All obligations, but", "# Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User", "parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append(", "well, # because they are useful in several places # def identifySingles() :", "messages def dbg(*msg): parts = [ str(x) for x in msg ] msg_string", "seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1", "CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range(", "in CategoriesValues[i] : i_item = (i, v1) for j in range(i+1,nslots) : ##", "t in Suite : for slot in columns : value = t[slot] print_(\"%15s\"", "and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce", "The other lists are # cleared lazily, when we bring up an obligation.", "dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\",", "range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in Excludes: return False if ((tslot,", "obligations # and choose the one with highest score. This is fairly expensive", "already have been fulfilled (deletion is lazy). 
We may scramble # this list", "makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs :", "(value, slot, condition) triples ValueExcepts = [ ] # List of (value, slot,", "varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles :", "and then produce a test suite that # covers the missing pairs: python", "the above copyright notice, this list of conditions and the following disclaimer. *", "\"error\" choice, i.e., for pairs # generation the value will be fixed. We", "not # been covered: python genpairs.py --csv --initial-suite tests.txt -o -v -p <", "its properties for cond in valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond)", "in human-readable # format: python genpairs.py < foo.cp # To read a partial", "EOFToken if tok.endswith(\":\") : return CategoryToken if tok == \"if\" : return IfToken", "IfCond in ValueIfs : val, slot, cond = IfCond for conflict_slot in PropsSlots[", "of obligations, some of which may # already have been fulfilled (deletion is", "\"single\" or \"error\" choice, i.e., for pairs # generation the value will be", "else: dbg_p(\"#DBG *** Rolling back \", s1, s2) # Restore previous values testcase[", "# Debug messages def dbg(*msg): parts = [ str(x) for x in msg", "] = v1 old_v2 = testcase[ s2 ] testcase[ s2 ] = v2", "all-pairs covering test suite # # (c) 2007 University of Oregon and <NAME>", ": val, slot, cond = IfCond for conflict_slot in PropsSlots[ cond ] :", "format is comma-separated-values (suitable as input to Excel and other spreadsheets, genpairs with", "of file\") return [ ] Token = tokenStream.next() print_(\"Resuming from\" , Token) category", "handled ## by the application, and we assume special case processing ## may", "a pair (the two items must occur together in some case) # An", "return seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase", "testcase[ 
slot ] != DontCare and testcase[slot] != val) : return False for", "CategoriesProps = { } # For each category, all props on any values", "## Configuration parameters DBG = False ## Debugging mode, on (true) or off", "logging.getLogger(__name__) # Debug messages def dbg(*msg): parts = [ str(x) for x in", ") ) yield word Token = \"<PASSWORD>\" tokenStream = getToken() def parse(): global", "identify the multiples (non-single columns) as well, # because they are useful in", "# so we score the *new* parts only. value = 1 ## For", "are creating ## a test suite to fill in the remainder of the", "in valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond) if kind == \"prop\"", "that the initial test suite is not ## a suite of special and", "CSV-consuming application. \"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row", "in s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) ) ) yield", "\"_\" ## Configuration parameters DBG = False ## Debugging mode, on (true) or", "in ValueIfs : val, slot, cond = IfCond for conflict_slot in PropsSlots[ cond", "without specific prior written permission. 
This software is provided by the copyright holders", "tokenClass( Token ) == IfToken : Token = six.next(tokenStream) ifcond = Token Token", "j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\") # When", "suite that # covers the missing pairs: python genpairs.py --csv --initial-suite tests.txt <", "order given) ## The CategoriesList can also be considered the test case schema", "initial test suite (or several), and ## eliminate those obligations, so we are", "if tok == \"except\" : return ExceptToken if tok == \"single\" : return", "col + \"'\", \"not in specification\") in_schema_map.append(-1) for vec in reader: if len(vec)", "PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken", "in msg ] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug messages def", "] : if compatible((col,val), testcase) : testcase[ col ] = val if completeCase(", "in MultipleColumns : for v1 in CategoriesValues[i] : i_item = (i, v1) for", "] = v2 if completeCase( columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG", "] = DontCare dbg_p(\"#DBG ** Failing to fill column \", col , \"", ": kind = nameOf(cond) condVal = valOf(cond) if kind == \"prop\" : CategoriesProps[", "covering test suite # # (c) 2007 University of Oregon and <NAME> #", "if advised of the possibility of such damage. \"\"\" usage = \"\"\"Usage: #", "testCaseValue, testcase ) # --------------------------------------------------------- # # Is a given (slot,value) pair compatible", "## Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ] # All obligations,", "\"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns,", "csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) 
## First line should be", ") : Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning - No", "continue ## ## --- short cut doesn't work if only one varying column", "!= -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format", "# # (c) 2007 University of Oregon and <NAME> # All rights reserved.", "sets Singles = [] ## List of (slot,value,kind) where kind is \"single\" or", ": PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise", ": return PropToken if tok == \"except\" : return ExceptToken if tok ==", "# def getToken() : while 1: s = sys.stdin.readline() if not s: dbg(\"#DBG", "condition) triples ValueExcepts = [ ] # List of (value, slot, condition) triples", "): testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder)", ") col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \",", "if UserOptions.combinations : while len(ObsList) > 0 : CreateCase() if UserOptions.varying : PrintTable(", "PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\"", "one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only", "parsing ## Temporary, for building excludes PropsSlots = { } # For each", "by suppressing them. # (Note they may still participate in excludes.) 
# #", "obligations ---\") # When we complete a test case, we remove obligations from", "s2 ] = v2 if completeCase( columnOrder[1:] , testcase ): return True else:", "on (true) or off (false) DBGp = False ## Performance debugging, December 2006", "(val, slotNum, condVal) ) elif kind == \"error\" or kind == \"single\" :", "colObs[ len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase):", "(i_item, j_item) obbackward = (j_item, i_item) if obforward not in Excludes and obbackward", "reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) ## First line", "return False # ------------------------------------------------------------ # Print Warnings (to stderr unless otherwise specified) #", "in range(i+1,nslots) : ## if j in SingleColumns: continue ## ## --- short", "to vector for debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1 = ob[0]", "We'll only consider *added* value, # so we score the *new* parts only.", "optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in", "if UserOptions.pairs : print_(\"=== Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if", "candidates considered colObs = ObsByCol[col] candidates = [ ] obindex = 0 while", ": for cs_value in CategoriesValues[ conflict_slot ] : if cond in ValueProps[ (conflict_slot,", "in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding :", "value) # An itempair is a pair (item, item), that is, ((slot, value),", "compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score the # Note", "(array) with n columns # # Representations: # A test case is represented", ")] + parseConditions() if tokenClass( Token ) == SingleToken : Token = six.next(tokenStream)", "in Excludes: return False return True # 
--------------------------------------------------------- def MakeTuple ( len ):", "= colObs[obindex] if not (ob in Outstanding or reversePair(ob) in Outstanding): # Here", "We will record obligations in three different data structures, # for different forms", "generation will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations", "A test case is represented as a list, indexed by column (category) #", "slot, cond = IfCond for conflict_slot in PropsSlots[ cond ] : for cs_value", "val, conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"--- Creating obligations list ---\")", "ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def", "# Like AETG and several other covering array generators, the outer # loop", "initial_suite_clear( suite ) if UserOptions.pairs : print_(\"=== Pairs required for completion ===\" )", "## For at least meeting one obligation ((s1, v1), (s2, v2)) = ob", "# Generate an all-pairs covering test suite # # (c) 2007 University of", "columns \", in_schema , \" but saw \", vec) # ---------------- ## Print", "and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases", "## until we know whether it is a singleton singleton = False ValueProps[", "## if j in SingleColumns: continue ## ## --- short cut doesn't work", "of its contributors may be used to endorse or promote products derived from", "or kind == \"single\" : Singles.append( (val, slotNum, kind) ) singleton = True", "useful in several places # def identifySingles() : for slot in range(len(CategoriesList)) :", "columns are those in which all but one value is # listed as", "cond ] : for cs_value in CategoriesValues[ conflict_slot ] : if cond not", "obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", 
candidates) for cand", "level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def dbg(*msg): parts = [ str(x)", "permitted provided that the following conditions are met: * Redistributions of source code", "n columns # # Representations: # A test case is represented as a", "the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\",", "identifySingles() : for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning:", "test vectors\") print_(\"\") for slot in columns : parm = CategoriesList[ slot ]", "columns, descriptive_title ) : if UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else:", "= csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) ## First line should", "(len(ObsList) == 0): return seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2, v2", "trying to see what is missing in an initial test suite. 
## def", "or otherwise) arising in any way out of the use of this software,", "if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \",", "columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \" in \", testcase) #", "test suite # # (c) 2007 University of Oregon and <NAME> # All", "[ ] # All obligations, but only one direction Outstanding = set() #", ") s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------", "tok == EOF : return EOFToken if tok.endswith(\":\") : return CategoryToken if tok", "until we know whether it is a singleton singleton = False ValueProps[ (slotNum,", "case) # A case is a list (array) with n columns # #", "testcase) return True dbg_p(\"#DBG * Attempting to complete\", testcase ) col = columnOrder[0]", "been covered: python genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp #", "(i, v1) for j in range(i+1,nslots) : ## if j in SingleColumns: continue", "goods or services; loss of use, data, or profits; or business interruption) however", "= ObsByCol[col] candidates = [ ] obindex = 0 while obindex < len(colObs)", "form must reproduce the above copyright notice, this list of conditions and the", "val) : return False for tslot in range(len(testcase)) : if ((slot, val), (tslot,", "## List of its properties for cond in valDesc[1:] : kind = nameOf(cond)", "Warning, format mismatch with initial suite \", initial_suite) print_(\"*** Expecting columns \", in_schema", "SingleColumns = [ ] # Columns with just one (non-error, non-single) choice MultipleColumns", "# Excludes that come from \"except\" clauses for ExceptCond in ValueExcepts : val,", "values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite", "application. 
\"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row =", "and # we can save space in output by suppressing them. # (Note", "Generator to produce tokens, one by one # def getToken() : while 1:", "== \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal", "if testcase[s1] != v1 : for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol]))", "## List of test cases ## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read", "score the *new* parts only. value = 1 ## For at least meeting", "\" , t ) csv_writer.writerow( t ) # ---------------- ## Read an initial", "not ## a suite of special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True", "\", initial_suite) print_(\"*** Expecting columns \", in_schema , \" but saw \", vec)", "indexed by column (category) # A test suite is a list of test", "copyright notice, this list of conditions and the following disclaimer in the documentation", "testcase[ col ] = val if completeCase( columnOrder[1:], testcase ): return True else:", "== SingleToken : Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass(", ") else: CaseMessage( \"Warning - No pair possible: \", testcase ) def completeCase(", "(s2, v2)) = ob if testcase[s1] != v1 : for ccol in range(", "strict liability, or tort (including negligence or otherwise) arising in any way out", "## eliminate those obligations, so we are creating ## a test suite to", "\"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass( tok", "# --------------------------------------------------------- def MakeTuple ( len ): newList = [] for i in", "the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ]", "testcase ) : Suite.append( testcase ) else: CaseMessage( \"Warning - No pair possible:", "UserOptions.initial_suite : initial_suite_clear( suite ) if 
UserOptions.pairs : print_(\"=== Pairs required for completion", "it CategoriesProps = { } # For each category, all props on any", "return SingleToken if tok == \"error\" : return ErrorToken return ValueToken # Generator", ") else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles : Suite = [", "ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def", "suite # # (c) 2007 University of Oregon and <NAME> # All rights", "vector for debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1 = ob[0] s2,v2", "met: * Redistributions of source code must retain the above copyright notice, this", "if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos = s.find(\"//\"); if", "= MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return", "NOTE: Currently considering only pair obligations, ## not singletons. We should look at", "= v1 old_v2 = testcase[ s2 ] testcase[ s2 ] = v2 if", "singleton = False ValueProps[ (slotNum, val) ] = [] ## List of its", "lists of item. # import sys ## for file handling import random ##", "dbg_p(\"#DBG * Skipping column \", col, \" (already filled in)\") return completeCase( columnOrder[1:],", "suites ## Constants (other than tokens for parsing) DontCare = \"_\" ## Configuration", "# # Exclude is a dictionary mapping items to lists of item. #", "or promote products derived from this software without specific prior written permission. 
This", "deletion of obligations; we # clip from the end of the list dbg_p(\"#DBG", "len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob)", "ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\") #", "partial suite of test cases (tests.txt) in CSV format, # plus a test", "# License = \"\"\" (C) 2007,2017 University of Oregon and <NAME>. All rights", "# Each item in the pair is a <slot,value> or <name,value> pair def", "possible with each test case. # # Data structures: # We will record", "categories with more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\",", "= nameOf(cond) condVal = valOf(cond) if kind == \"prop\" : CategoriesProps[ category ].append(condVal)", "def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for import into a spreadsheet", "== 0): return seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2, v2 =", "one obligation ((s1, v1), (s2, v2)) = ob if testcase[s1] != v1 :", "case is represented as a list, indexed by column (category) # A test", "CategoriesValues[i] : i_item = (i, v1) for j in range(i+1,nslots) : ## if", "for single in Singles: CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList)", "range(i+1,nslots) : ## if j in SingleColumns: continue ## ## --- short cut", "is lazy). We may scramble # this list so we don't have an", "ob if testcase[s1] != v1 : for ccol in range( len(testcase) ): if", "print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for import", "cond = IfCond for conflict_slot in PropsSlots[ cond ] : for cs_value in", "outstanding. # ObsByCol is a dictionary obligations by column, also updated lazily. 
#", "test cases, and the inner loops try to fulfill # as many test", "number of candidates considered colObs = ObsByCol[col] candidates = [ ] obindex =", "= reader.next() in_schema_map = [ ] for i in range(len(in_schema)): col = in_schema[i]", "].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\") # When we", "fulfilled (deletion is lazy). We may scramble # this list so we don't", "PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite),", "testcase ) : slot, val = item if ( testcase[ slot ] !=", "] testcase[ s1 ] = v1 old_v2 = testcase[ s2 ] testcase[ s2", "print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated", "initial_suite) print_(\" Column \", i, \"'\" + col + \"'\", \"not in specification\")", "First line should be schema in_schema = reader.next() in_schema_map = [ ] for", "the end of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs)", "damage. 
\"\"\" usage = \"\"\"Usage: # To read a specification (foo.cp) and print", "choices for \", CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1", "print_(\" Column \", i, \"'\" + col + \"'\", \"not in specification\") in_schema_map.append(-1)", "the multiples (non-single columns) as well, # because they are useful in several", "i in MultipleColumns : for v1 in CategoriesValues[i] : i_item = (i, v1)", ") : slot, val = item if ( testcase[ slot ] != DontCare", "---------------------------------------- import six # Python 2 and 3 compatibility from six import print_", ": if DBG: print_(\"--- Creating obligations list ---\") keys = CategoriesList nslots =", "import csv ## for reading and writing test suites ## Constants (other than", "+ parseConditions() if tokenClass( Token ) == ExceptToken : Token = six.next(tokenStream) condname", "1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes, so call makeExcludes", "covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\",", "for slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite", "tuple[0] def valOf( tuple ): return tuple[1] # --------------- Build initial data structures", "True dbg_p(\"#DBG * Attempting to complete\", testcase ) col = columnOrder[0] if testcase[col]", "dbg_p(\"### Candidates: \", candidates) for cand in candidates: (score, ((s1, v1),(s2,v2))) = cand", "MultipleColumns = [ ] # Complement of SingleColumns -- pairs are from these", "with it CategoriesProps = { } # For each category, all props on", ": i_item = (i, v1) for j in range(i+1,nslots) : ## if j", "complete a test case, we remove obligations from # the outstanding obligations list.", "the set of outstanding obligations. 
Typical use is when ## we are trying", "test cases ## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec file using", "to fill column \", col , \" with \", testcase) return False #", "completeCase( columnOrder, testcase ) : Suite.append( testcase ) else: CaseMessage( \"Warning - No", "unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print", "values = parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \", values) slotNum =", "Token Token = six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more", "if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular value choices for \", CategoriesList[slot],", "val as a possible value of the property ## until we know whether", "UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title ) def", "# cleared lazily, when we bring up an obligation. 
# def clearObligations(testcase) :", "= logging.getLogger(__name__) # Debug messages def dbg(*msg): parts = [ str(x) for x", "ValueIfs = [ ] # List of (value, slot, condition) triples ValueExcepts =", ": return IfToken if tok == \"prop\" : return PropToken if tok ==", "\"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif kind == \"error\" or kind", "# (10^20 takes about 9 minutes wall time on G4 laptop), so now", "List of test cases ## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec", ": s = s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s: %s>>\" %", "condition attribute:\", cond) if not singleton: vlist.append( val ) parseSpec() def parseValues(): global", "Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"--- Creating", "conditions\") return [ ] # -------------- The form of a pair (obligation or", "1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value + 1 candidates.append( (value,", "return ErrorToken return ValueToken # Generator to produce tokens, one by one #", "Token ) == PropToken : Token = six.next(tokenStream) condname = Token Token =", "we # set a limit (maxCandidates) on number of candidates considered colObs =", "trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if in_schema_map[i] != -1 :", "help=\"Print a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license", "for t in Suite : dbg(\"write row \" , t ) csv_writer.writerow( t", "# (Note they may still participate in excludes.) # # We'll identify the", "dbg(\"#DBG Parsed: \", category, \" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category", "Obligations depend on excludes, so call makeExcludes before # calling makeObligations # def", "test suite. 
## def print_required_pairs( ) : for ob in Outstanding : s1,", "= False ValueProps[ (slotNum, val) ] = [] ## List of its properties", "= parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return value + conditions def", "v1 in CategoriesValues[i] : i_item = (i, v1) for j in range(i+1,nslots) :", "((slot, val), (tslot, testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes:", ") : if tok == EOF : return EOFToken if tok.endswith(\":\") : return", "slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite", "% value , end=\"\") print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\"", "csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row \" , t ) csv_writer.writerow(", "== \"single\" : Singles.append( (val, slotNum, kind) ) singleton = True else :", "schema mismatch in\", initial_suite) print_(\" Column \", i, \"'\" + col + \"'\",", "\"Pairwise coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if", "score any more obligations, can we at least ## fill in some compatible", "< foo.cp # To read the same as above, and then produce a", ": print_(\"Discarding rest of file\") return [ ] Token = tokenStream.next() print_(\"Resuming from\"", "Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s,", ") : matches = False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working", "def ObToVector( ob ) : \"\"\"Convert obligation to vector for debugging messages\"\"\" t", "messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User arguments from optparse", "ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \",", "v2)) = ob if testcase[s1] != v1 : for ccol in range( len(testcase)", "parseValue 
returns\", value + conditions) return value + conditions def parseConditions(): global Token", "in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase)", "look at single and error ## cases first, and ## * Not consider", "is, ((slot, value), (slot, value)) # An obligation is a pair (the two", "= value + 1 candidates.append( (value, ob) ) obindex = obindex + 1", "dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format). Often used together with -p\"\"\")", "## a test suite to fill in the remainder of the test ##", "= cand old_v1 = testcase[ s1 ] testcase[ s1 ] = v1 old_v2", "otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a", "programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more than", "save space in output by suppressing them. # (Note they may still participate", "DBG = False ## Debugging mode, on (true) or off (false) DBGp =", "while 1: s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF", "in the remainder of the test ## obligations. ## ## NOTE: Currently considering", "one varying column -- for v2 in CategoriesValues[j] : j_item = (j, v2)", "we remove obligations from # the outstanding obligations list. 
The other lists are", "item, testcase ) : slot, val = item if ( testcase[ slot ]", "print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the script, execute,", "Token) category = Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \",", "in range( len(testcase) ): for j in range ( i+1, len(testcase) ): ob", "any test case with more than one ## single or error value (we", "for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not", "len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if", "event shall the copyright owner or contributors be liable for any direct, indirect,", ") else: print_(\"*** Warning, format mismatch with initial suite \", initial_suite) print_(\"*** Expecting", "that the following conditions are met: * Redistributions of source code must retain", "# ---------------- ## Print the set of outstanding obligations. Typical use is when", "\", testCaseValue, testcase ) # --------------------------------------------------------- # # Is a given (slot,value) pair", "slots with it CategoriesProps = { } # For each category, all props", "to lists of item. # import sys ## for file handling import random", "<<EOF reached>>\") yield EOF return commentPos = s.find(\"//\"); if commentPos >= 0 :", "[ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for valDesc in values", "outstanding obligations list. 
The other lists are # cleared lazily, when we bring", "Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs", ") else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title +", "after parsing ## Temporary, for building excludes PropsSlots = { } # For", "Dec 2006 --- Let's look at all the outstanding obligations # and choose", "# --------------- Build initial data structures ---- # Single columns are those in", "in range(len(in_schema)): col = in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col)", "but saw \", vec) # ---------------- ## Print the set of outstanding obligations.", "covering array generators, the outer # loop will generate test cases, and the", "not (ob in Outstanding or reversePair(ob) in Outstanding): # Here is our lazy", "testcase[ s1 ] testcase[ s1 ] = v1 old_v2 = testcase[ s2 ]", "token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken =", "value, # so we score the *new* parts only. value = 1 ##", "None)] + parseConditions() if tokenClass( Token ) == IfToken : Token = six.next(tokenStream)", "kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if", "dbg_p(\"#DBG *** Trying any value, regardless of obligation\") for val in CategoriesValues[ col", "schema_row = [ ] for slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row)", "= 50 ## Bigger = better solutions, smaller = faster ## Platform compatibility", "spreadsheet or other CSV-consuming application. 
\"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout,", "ob in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1]", "== len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if in_schema_map[i]", "# Print Warnings (to stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg,", "or tort (including negligence or otherwise) arising in any way out of the", "-------------- The form of a pair (obligation or exclusion) ----- def makePair( s1,", "December 2006 maxCandidates = 50 ## Bigger = better solutions, smaller = faster", "of a pair (obligation or exclusion) ----- def makePair( s1, v1, s2, v2", "indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement", "# An exclusion is a pair (the two items must not occur together", "other covering array generators, the outer # loop will generate test cases, and", ") # ---------------- ## Read an initial test suite (or several), and ##", "parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass( Token )", "dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) ) ) yield word Token =", "help = \"\"\"Output format is comma-separated-values (suitable as input to Excel and other", "is a list of test cases # An item is a tuple, and", "the outstanding obligations list. The other lists are # cleared lazily, when we", "= \"<PASSWORD>\" tokenStream = getToken() def parse(): global Token global NCol Token =", "val ) return values def parseValue(): global Token dbg(\"#DBG (parseValue, looking at \",", "it is a singleton singleton = False ValueProps[ (slotNum, val) ] = []", "only consider *added* value, # so we score the *new* parts only. 
value", "in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\"", "= \"<SINGLE>\" EOFToken = EOF def tokenClass( tok ) : if tok ==", "for shuffling lists import csv ## for reading and writing test suites ##", "= Token Token = six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No", "False ## Performance debugging, December 2006 maxCandidates = 50 ## Bigger = better", "val = parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append( val ) return", "to Excel and other spreadsheets, genpairs with the -i option, and some other", "[(\"prop\" , condname)] + parseConditions() if tokenClass( Token ) == ExceptToken : Token", "slotNum, condVal ) ) elif kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal)", "this list so we don't have an unfortunate ordering. # Outstanding is a", "cs_value in CategoriesValues[ conflict_slot ] : if cond in ValueProps[ (conflict_slot, cs_value) ]", "v2) obforward = (i_item, j_item) obbackward = (j_item, i_item) if obforward not in", "come from \"if\" clauses --- reverse sense for IfCond in ValueIfs : val,", "maxCandidates : ob = colObs[obindex] if not (ob in Outstanding or reversePair(ob) in", ": CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal not in", "read the same as above, and then produce a test suite that #", "Excludes that come from \"if\" clauses --- reverse sense for IfCond in ValueIfs", "six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \", values) slotNum", "To read the same as above, and then produce a test suite that", ": return [ ] if tokenClass( Token ) != CategoryToken : print_(\"Syntax error", "val in CategoriesValues[ col ] : if compatible((col,val), testcase) : testcase[ col ]", "= [ ] # List of (value, slot, condition) triples ## What we", "each test case. 
# # Data structures: # We will record obligations in", "if DBGp: dbg(*msg) # ------------------------------------ ## User arguments from optparse import OptionParser optparser", ", Token) category = Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed:", "some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with", "in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2]", "\"\"\" Print vectors as comma-separated values, for import into a spreadsheet or other", "obligations, can we at least ## fill in some compatible value and move", "i in range( len(testcase) ): for j in range ( i+1, len(testcase) ):", "Warnings (to stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr", "*** Compatible\", ob, testcase ) # Score the # Note one (but not", "# Dec 2006 --- Let's look at all the outstanding obligations # and", "missing in an initial test suite. ## def print_required_pairs( ) : for ob", "to complete\", testcase ) col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG *", "## Working yet? (No.) ## First line should be schema in_schema = reader.next()", "# Consts for token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken =", "testcase[s2] = v2 for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG ===", "pair (the two items must not occur together in any case) # A", "can save space in output by suppressing them. 
# (Note they may still", "Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value", "Failing to fill column \", col , \" with \", testcase) return False", "# Restore previous values testcase[ s1 ] = old_v1 testcase[ s2 ] =", "'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read", "## for reading and writing test suites ## Constants (other than tokens for", "testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return", "makePair( slot, val, conflict_slot, cs_value)) # Excludes that come from \"if\" clauses ---", "((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding", "len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if", "classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\"", "How shall we fill this DontCare with something useful? # Let's try for", "\"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print", "## for shuffling lists import csv ## for reading and writing test suites", "Single columns are those in which all but one value is # listed", "purpose are disclaimed. 
In no event shall the copyright owner or contributors be", "fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on", "i in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation", "sense for IfCond in ValueIfs : val, slot, cond = IfCond for conflict_slot", "obindex = 0 while obindex < len(colObs) and len(candidates) < maxCandidates : ob", "tuple ): return tuple[0] def nameOf( tuple ): return tuple[0] def valOf( tuple", "just assume that the initial test suite is not ## a suite of", "= 0 for i in range( len(testcase) ): for j in range (", "Singles.append( (val, slotNum, kind) ) singleton = True else : print_(\"*ERR* Unrecognized condition", "expensive # (10^20 takes about 9 minutes wall time on G4 laptop), so", "(j, v2) obforward = (i_item, j_item) obbackward = (j_item, i_item) if obforward not", "help=\"\"\"Read initial test suite (in csv format). Often used together with -p\"\"\") optparser.add_option(\"-p\",", "vectors\") print_(\"\") for slot in columns : parm = CategoriesList[ slot ] print_(\"%15s\"", "value and move on? dbg_p(\"#DBG *** Trying any value, regardless of obligation\") for", "outstanding obligation. # Dec 2006 --- Let's look at all the outstanding obligations", "end=\"\") print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as", "saw \", Token ) return [ \"--bogus--\"] value = [ Token ] Token", "To read a partial suite of test cases (tests.txt) in CSV format, #", "= \"_\" ## Configuration parameters DBG = False ## Debugging mode, on (true)", "## Temporary, for building excludes PropsSlots = { } # For each property", "# Exclude is a dictionary mapping items to lists of item. 
# import", "help = \"\"\"Do not produce test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\",", "def parse(): global Token global NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList)", "def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\")", "tort (including negligence or otherwise) arising in any way out of the use", "more obligations, can we at least ## fill in some compatible value and", "ob = makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue =", "# All obligations, but only one direction Outstanding = set() # All obligations,", "scramble # this list so we don't have an unfortunate ordering. # Outstanding", "sys.stdout, dialect=csv.excel ) schema_row = [ ] for slot in columns : schema_row.append(", "calling makeObligations # def makeExcludes() : # Excludes that come from \"except\" clauses", "-- Main processing: Parse the script, execute, print -- parse() identifySingles() makeExcludes() makeObligations()", "dest=sys.stderr ) : \"\"\"Print a warning or error message concerning a particular partially-defined", "optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test", "# Score the # Note one (but not both) of these may coincide", "the above copyright notice, this list of conditions and the following disclaimer in", "Oregon nor the names of its contributors may be used to endorse or", "): return True else: testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing to", "and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score the #", "= v1 testcase[s2] = v2 for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0]", "def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User arguments from optparse import", "a pair (slot number, 
value) # An itempair is a pair (item, item),", "makeExcludes() : # Excludes that come from \"except\" clauses for ExceptCond in ValueExcepts", "in Suite : for slot in columns : value = t[slot] print_(\"%15s\" %", "initial_suite_clear( initial_suite ) : matches = False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect)", "test suite is not ## a suite of special and error cases. ##", "Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [ ] ## List of category", "a tuple # # Like AETG and several other covering array generators, the", "by a single ## or error case. ## For now, we just assume", "# Python 2 and 3 compatibility from six import print_ ## Logging #", "condname = Token Token = six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG", "if tokenClass( Token ) == SingleToken : Token = six.next(tokenStream) return [(\"single\", None)]", "column \", col, \" (already filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG", "one ## single or error value (we don't know which will be handled", "the following disclaimer in the documentation and/or other materials provided with the distribution.", "in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs : print_(\"=== Pairs required for", "file\") return [ ] Token = tokenStream.next() print_(\"Resuming from\" , Token) category =", "condname)] + parseConditions() if tokenClass( Token ) == ExceptToken : Token = six.next(tokenStream)", "only one varying column -- for v2 in CategoriesValues[j] : j_item = (j,", "compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score", "we score the *new* parts only. value = 1 ## For at least", "val) ] = [] ## List of its properties for cond in valDesc[1:]", "reserved. # License = \"\"\" (C) 2007,2017 University of Oregon and <NAME>. 
All", "in several places # def identifySingles() : for slot in range(len(CategoriesList)) : if", "t in Suite : dbg(\"write row \" , t ) csv_writer.writerow( t )", "reader.next() in_schema_map = [ ] for i in range(len(in_schema)): col = in_schema[i] if", ": testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder =", "\"except\" clauses for ExceptCond in ValueExcepts : val, slot, cond = ExceptCond for", ") schema_row = [ ] for slot in columns : schema_row.append( CategoriesList[slot] )", "short cut doesn't work if only one varying column -- for v2 in", "action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output format is comma-separated-values (suitable as", "if tok == \"if\" : return IfToken if tok == \"prop\" : return", "cond = ExceptCond for conflict_slot in PropsSlots[ cond ] : for cs_value in", "\"prop\" : return PropToken if tok == \"except\" : return ExceptToken if tok", "we build Suite = [ ] ## List of test cases ## Instrumentation", "tokenClass(word) ) ) yield word Token = \"<PASSWORD>\" tokenStream = getToken() def parse():", "no effect def initial_suite_clear( initial_suite ) : matches = False reader = csv.reader(", "## Performance debugging, December 2006 maxCandidates = 50 ## Bigger = better solutions,", "v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1]", "len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG single obligation: \",", "in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation not", "\"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more than one non-error", "# ------------------------------------------------------------ # Print Warnings (to stderr unless otherwise specified) # 
------------------------------------------------------------ def", "Token Token = six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if tokenClass( Token", "but not limited to, procurement of substitute goods or services; loss of use,", "= [ ] # All obligations, but only one direction Outstanding = set()", "Token dbg(\"#DBG (parseSpec)\") if Token == EOF : return [ ] if tokenClass(", "so we don't have an unfortunate ordering. # Outstanding is a set of", "## Read an initial test suite (or several), and ## eliminate those obligations,", ": initial_suite_clear( suite ) if UserOptions.pairs : print_(\"=== Pairs required for completion ===\"", "props on any values ValueProps = { } # Map (slot,value) pair to", "] while tokenClass( Token ) == ValueToken : val = parseValue() dbg(\"#DBG (parsed", "s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos = s.find(\"//\"); if commentPos >=", "dbg(\"#DBG parseValue returns\", value + conditions) return value + conditions def parseConditions(): global", "the value will be fixed. We can save some time by # always", "dbg(*msg) # ------------------------------------ ## User arguments from optparse import OptionParser optparser = OptionParser(usage=usage)", "# ObsByCol is a dictionary obligations by column, also updated lazily. # #", "old_v1 testcase[ s2 ] = old_v2 ## If we couldn't score any more", "we bring up an obligation. 
# def clearObligations(testcase) : testCaseValue = 0 for", "\"Pairwise coverage\" ) if UserOptions.singles : Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)),", "test cases (tests.txt) in CSV format, # plus a test specification, and report", "six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"except\" , condname)] + parseConditions()", "Like AETG and several other covering array generators, the outer # loop will", "to fulfill # as many test obligations as possible with each test case.", "obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList)", "in specification\") in_schema_map.append(-1) for vec in reader: if len(vec) == len(in_schema) : trvec", "optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more than one", "value, slot, kind = single dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot]", "------------------------------------ ## User arguments from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\",", "!= DontCare and testcase[slot] != val) : return False for tslot in range(len(testcase))", "-- parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite )", "UserOptions.combinations : while len(ObsList) > 0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns,", "initial data structures ---- # Single columns are those in which all but", "not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" :", "s1 ] = v1 old_v2 = testcase[ s2 ] testcase[ s2 ] =", "] # Complement of SingleColumns -- pairs are from these NCol = 0", "loss of use, data, or profits; or 
business interruption) however caused and on", "consequential damages (including, but not limited to, procurement of substitute goods or services;", "== \"single\" : return SingleToken if tok == \"error\" : return ErrorToken return", "\", values) slotNum = len(CategoriesList) CategoriesList.append( category ) vlist = [ ] CategoriesValues.append(vlist)", "structures ---- # Single columns are those in which all but one value", "--- short cut doesn't work if only one varying column -- for v2", "permission. This software is provided by the copyright holders and contributors \"as is\"", "random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\") # When we complete", "testCaseValue = testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase ) # ---------------------------------------------------------", ": print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob", "is comma-separated-values (suitable as input to Excel and other spreadsheets, genpairs with the", "list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) ) :", "Configuration parameters DBG = False ## Debugging mode, on (true) or off (false)", "* Not consider any pairs as being satisfied by a single ## or", "No more conditions\") return [ ] # -------------- The form of a pair", "# import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def dbg(*msg):", "= MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]]", "action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output", "return True # 
--------------------------------------------------------- def MakeTuple ( len ): newList = [] for", "set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ] # All", "as a possible value of the property ## until we know whether it", "obligation. # Dec 2006 --- Let's look at all the outstanding obligations #", "= CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for t", "# Per column, both directions SingleColumns = [ ] # Columns with just", "with just one (non-error, non-single) choice MultipleColumns = [ ] # Complement of", "ordering. # Outstanding is a set of all the obligations still outstanding. #", "test cases # An item is a tuple, and an itempair is a", "now we # set a limit (maxCandidates) on number of candidates considered colObs", "2 and 3 compatibility from six import print_ ## Logging # import logging", "by column (category) # A test suite is a list of test cases", "value + conditions) return value + conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\")", ": Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)), \"Single and error vectors\" )", "items must not occur together in any case) # A case is a", "NCol = 0 # ==len(CategoriesList), set after parsing ## Temporary, for building excludes", "ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken", ": for slot in columns : value = t[slot] print_(\"%15s\" % value ,", "<NAME>. All rights reserved. Redistribution and use in source and binary forms, with", "obligation. 
# def clearObligations(testcase) : testCaseValue = 0 for i in range( len(testcase)", "1 ## For at least meeting one obligation ((s1, v1), (s2, v2)) =", "(slotNum, val ) ].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal] = set()", "of value sets Singles = [] ## List of (slot,value,kind) where kind is", "obforward = (i_item, j_item) obbackward = (j_item, i_item) if obforward not in Excludes", "* Redistributions of source code must retain the above copyright notice, this list", "of values have not # been covered: python genpairs.py --csv --initial-suite tests.txt -o", "schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row \" ,", "columnOrder[1:], testcase ): return True else: testcase[ col ] = DontCare dbg_p(\"#DBG **", "cases # An item is a tuple, and an itempair is a tuple", "if tokenClass( Token ) != ValueToken : print_(\"Syntax error, expecting value, saw \",", "Skipping column \", col, \" (already filled in)\") return completeCase( columnOrder[1:], testcase )", "messages\"\"\" t = MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1", "considering only pair obligations, ## not singletons. We should look at single and", "a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms", "by one # def getToken() : while 1: s = sys.stdin.readline() if not", "or error case. 
## For now, we just assume that the initial test", "## ## --- short cut doesn't work if only one varying column --", "} # For each category, all props on any values ValueProps = {", "\"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv", "we know whether it is a singleton singleton = False ValueProps[ (slotNum, val)", "file handling import random ## for shuffling lists import csv ## for reading", "read a specification (foo.cp) and print the test vector in human-readable # format:", "= ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList) == 0): return seedObligation", "work if only one varying column -- for v2 in CategoriesValues[j] : j_item", "the test ## obligations. ## ## NOTE: Currently considering only pair obligations, ##", ": Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning - No pair", "------------------------------------------------------------ # Print Warnings (to stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage(", "*added* value, # so we score the *new* parts only. value = 1", "for j in range(i+1,nslots) : ## if j in SingleColumns: continue ## ##", "\", testcase) # How shall we fill this DontCare with something useful? #", "any case) # A case is a list (array) with n columns #", "# Representations: # A test case is represented as a list, indexed by", "loops try to fulfill # as many test obligations as possible with each", "be used to endorse or promote products derived from this software without specific", "PropToken if tok == \"except\" : return ExceptToken if tok == \"single\" :", "remove obligations from # the outstanding obligations list. The other lists are #", "an all-pairs covering test suite # # (c) 2007 University of Oregon and", "these may coincide with # an existing element. 
We'll only consider *added* value,", "parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite ) if", ", ifcond)] + parseConditions() if tokenClass( Token ) == PropToken : Token =", "MultipleColumns, \"Pairwise coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" )", "contract, strict liability, or tort (including negligence or otherwise) arising in any way", "foo.cp \"\"\" # # An item is a pair (slot number, value) #", "the University of Oregon nor the names of its contributors may be used", "# # Data structures: # We will record obligations in three different data", "test case with more than one ## single or error value (we don't", "for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) > 0", "# All rights reserved. # License = \"\"\" (C) 2007,2017 University of Oregon", "is a dictionary mapping items to lists of item. 
# import sys ##", ") == SingleToken : Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions() if", "[(\"single\", None)] + parseConditions() if tokenClass( Token ) == IfToken : Token =", "condition) triples ## What we build Suite = [ ] ## List of", "= 1 ## For at least meeting one obligation ((s1, v1), (s2, v2))", "= [ ] ## List of category names (in order given) ## The", "with more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True,", "kind == \"single\" : Singles.append( (val, slotNum, kind) ) singleton = True else", "Candidates: \", candidates) for cand in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1", "or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential", "t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title )", ": for v1 in CategoriesValues[i] : i_item = (i, v1) for j in", "the outstanding obligations # and choose the one with highest score. 
This is", "Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def", ") != CategoryToken : if tokenClass( Token ) == EOF : print_(\"Discarding rest", "= [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for valDesc in", "in ValueExcepts : val, slot, cond = ExceptCond for conflict_slot in PropsSlots[ cond", "is when ## we are trying to see what is missing in an", "## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ # -- Respond to special", "= CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList)", "<name,value> pair def slotOf( tuple ): return tuple[0] def nameOf( tuple ): return", "return [ \"--bogus--\"] value = [ Token ] Token = six.next(tokenStream) conditions =", "return [ ] if tokenClass( Token ) != CategoryToken : print_(\"Syntax error on", "ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes", "column -- for v2 in CategoriesValues[j] : j_item = (j, v2) obforward =", "(value, ob) ) obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \",", "( len ): newList = [] for i in range(0,len): newList.append(DontCare) return newList", "const = \"csv\", help = \"\"\"Output format is comma-separated-values (suitable as input to", "options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \")", "# generation the value will be fixed. We can save some time by", ") return [ \"--bogus--\"] value = [ Token ] Token = six.next(tokenStream) conditions", "choose the one with highest score. 
This is fairly expensive # (10^20 takes", "off (false) DBGp = False ## Performance debugging, December 2006 maxCandidates = 50", "= [ ] for valDesc in values : val = valDesc[0] ## The", "if condVal not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind ==", "values = [ ] while tokenClass( Token ) == ValueToken : val =", "may still participate in excludes.) # # We'll identify the multiples (non-single columns)", "We may scramble # this list so we don't have an unfortunate ordering.", "For each category, all props on any values ValueProps = { } #", "Representations: # A test case is represented as a list, indexed by column", "must retain the above copyright notice, this list of conditions and the following", "\"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and 'single'", "next category while tokenClass( Token ) != CategoryToken : if tokenClass( Token )", "data structures ---- # Single columns are those in which all but one", ") random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) ) : Suite.append( testcase )", "# Here is our lazy deletion of obligations; we # clip from the", "1 dbg(\"*** Value \", testCaseValue, testcase ) # --------------------------------------------------------- # # Is a", "obligation\") for val in CategoriesValues[ col ] : if compatible((col,val), testcase) : testcase[", "ExceptCond in ValueExcepts : val, slot, cond = ExceptCond for conflict_slot in PropsSlots[", "sep=\"\" for col in range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest)", "print_(\"Skipping to next category\") ## Error recovery to next category while tokenClass( Token", "software without specific prior written permission. 
This software is provided by the copyright", "property name, set of slots with it CategoriesProps = { } # For", "value of the property ## until we know whether it is a singleton", "= True else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton: vlist.append(", "a singleton singleton = False ValueProps[ (slotNum, val) ] = [] ## List", "for cs_value in CategoriesValues[ conflict_slot ] : if cond not in ValueProps[ (conflict_slot,", "the test case schema CategoriesValues = [ ] ## List of value sets", "forms, with or without modification, are permitted provided that the following conditions are", "dbg(\"#DBG ***Trying columns \", columnOrder, \" in \", testcase) # How shall we", "parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token ==", "other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more", "columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG *** Rolling back \", s1,", "(obligation or exclusion) ----- def makePair( s1, v1, s2, v2 ): return ((s1,", "## Error recovery to next category while tokenClass( Token ) != CategoryToken :", ": matches = False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet?", "limited to, procurement of substitute goods or services; loss of use, data, or", "Token ) != ValueToken : print_(\"Syntax error, expecting value, saw \", Token )", "in Outstanding : value = value + 1 candidates.append( (value, ob) ) obindex", "should look at single and error ## cases first, and ## * Not", "at the beginning of pairs generation, and # we can save space in", "columns # # Representations: # A test case is represented as a list,", "valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond) if kind == \"prop\" :", "== \"error\" or kind == \"single\" : Singles.append( (val, slotNum, kind) ) singleton", "[(\"if\" 
, ifcond)] + parseConditions() if tokenClass( Token ) == PropToken : Token", "\"single\" or \"error\" Excludes = set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList", "v1),(s2,v2))) = cand old_v1 = testcase[ s1 ] testcase[ s1 ] = v1", "\"--bogus--\"] value = [ Token ] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG", "possible: \", testcase ) def completeCase( columnOrder, testcase ) : if len (columnOrder)", "tokenClass( Token ) != CategoryToken : if tokenClass( Token ) == EOF :", "ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value \", testCaseValue,", "itempair is a tuple # # Like AETG and several other covering array", ": if compatible((col,val), testcase) : testcase[ col ] = val if completeCase( columnOrder[1:],", "for col in range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else:", "\"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format).", "to produce tokens, one by one # def getToken() : while 1: s", "(slot,value) pair compatible with the test case so far? # def compatible( item,", "Token ) != CategoryToken : if tokenClass( Token ) == EOF : print_(\"Discarding", "when we bring up an obligation. # def clearObligations(testcase) : testCaseValue = 0", "tuple[0] def nameOf( tuple ): return tuple[0] def valOf( tuple ): return tuple[1]", "six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return value +", "obligations list ---\") keys = CategoriesList nslots = len(keys) for i in range(nslots):", "\" (already filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \",", "this list of conditions and the following disclaimer. 
* Redistributions in binary form", ") columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind = single", "with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug :", "format, # plus a test specification, and report which pairs of values have", "one value is # listed as a \"single\" or \"error\" choice, i.e., for", "[ ] while tokenClass( Token ) == ValueToken : val = parseValue() dbg(\"#DBG", "testcase[ s1 ] = v1 old_v2 = testcase[ s2 ] testcase[ s2 ]", "= [ ] ## List of test cases ## Instrumentation INSTR_N_Comparisons = 0", ") clearObligations( testcase ) else: CaseMessage( \"Warning - No pair possible: \", testcase", "AETG and several other covering array generators, the outer # loop will generate", "several places # def identifySingles() : for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot])", "lazy). We may scramble # this list so we don't have an unfortunate", "EOFToken = EOF def tokenClass( tok ) : if tok == EOF :", "sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation to vector for", "wall time on G4 laptop), so now we # set a limit (maxCandidates)", "range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest)", "we are creating ## a test suite to fill in the remainder of", "of source code must retain the above copyright notice, this list of conditions", "def dbg(*msg): parts = [ str(x) for x in msg ] msg_string =", ": value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value =", "= [ ] for slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for", "for i in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop() while", "] obindex = 0 while obindex < len(colObs) and 
len(candidates) < maxCandidates :", "!= CategoryToken : print_(\"Syntax error on \", Token, \" looking for 'category:'\") print_(\"Skipping", "category, all props on any values ValueProps = { } # Map (slot,value)", "for t in Suite : for slot in columns : value = t[slot]", "doesn't work if only one varying column -- for v2 in CategoriesValues[j] :", "[ ] # List of (value, slot, condition) triples ValueExcepts = [ ]", "or exclusion) ----- def makePair( s1, v1, s2, v2 ): return ((s1, v1),", "# A test case is represented as a list, indexed by column (category)", "%s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization", "format). Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report", "CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row \" , t", "!= ValueToken : print_(\"Syntax error, expecting value, saw \", Token ) return [", "ErrorToken : Token = six.next(tokenStream) return [(\"error\", None )] + parseConditions() if tokenClass(", "software is provided by the copyright holders and contributors \"as is\" and any", "Per column, both directions SingleColumns = [ ] # Columns with just one", ": trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format mismatch with", "= CategoriesList nslots = len(keys) for i in range(nslots): ObsByCol[i] = [] for", "].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal]", "will be fixed. 
We can save some time by # always fixing these", "the inner loops try to fulfill # as many test obligations as possible", "print_(\"Resuming from\" , Token) category = Token[0:-1] Token = six.next(tokenStream) values = parseValues()", "of the value itself ## Postpone marking val as a possible value of", "not produce test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\",", "DontCare with something useful? # Let's try for an outstanding obligation. # Dec", "UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the script, execute, print -- parse()", ": SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes, so call makeExcludes before", "maxCandidates = 50 ## Bigger = better solutions, smaller = faster ## Platform", "identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs", "Debugging mode, on (true) or off (false) DBGp = False ## Performance debugging,", "on any values ValueProps = { } # Map (slot,value) pair to list", "any express or implied warranties, including, but not limited to, the implied warranties", "single ## or error case. 
## For now, we just assume that the", ": while 1: s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield", "- No pair possible: \", testcase ) def CreateSingles(): for single in Singles:", "parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append( val ) return values def", "False ValueProps[ (slotNum, val) ] = [] ## List of its properties for", "non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test", "in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite : dbg(\"write", "default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\",", "= seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] =", "assume special case processing ## may miss other features, including other special cases)", "## a suite of special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True ##", "= six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return value", "ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value", "optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce test cases covering", "copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in", "Primary data structures CategoriesList = [ ] ## List of category names (in", "dest=\"output_format\", const = \"csv\", help = \"\"\"Output format is comma-separated-values (suitable as input", "NCol ) s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t #", "initialization above) ## ------------------------------------------------------------ # -- Respond to special diagnostic options -- if", "Creating obligations list ---\") keys = CategoriesList nslots = len(keys) for i in", "== \"error\" : return ErrorToken return ValueToken # Generator to produce tokens, one", "Complement of SingleColumns -- pairs are from these NCol = 0 # ==len(CategoriesList),", "data structures CategoriesList = [ ] ## List of category names (in order", "the missing pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" # #", "slot, val, conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"--- Creating obligations list", "\"error\" or kind == \"single\" : Singles.append( (val, slotNum, kind) ) singleton =", "as comma-separated values, for import into a spreadsheet or other CSV-consuming application. 
\"\"\"", "parseValue(): global Token dbg(\"#DBG (parseValue, looking at \", Token, \")\") if tokenClass( Token", "names ValueIfs = [ ] # List of (value, slot, condition) triples ValueExcepts", "and writing test suites ## Constants (other than tokens for parsing) DontCare =", ": dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) ) ) yield word Token", "[(\"error\", None )] + parseConditions() if tokenClass( Token ) == SingleToken : Token", "as a list, indexed by column (category) # A test suite is a", "if ( testcase[ slot ] != DontCare and testcase[slot] != val) : return", "range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec )", "return newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation not in Outstanding: if", "msg ] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg):", "kind = single dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot] = value", "given (slot,value) pair compatible with the test case so far? # def compatible(", "mapping items to lists of item. # import sys ## for file handling", "print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton: vlist.append( val ) parseSpec() def", "with something useful? # Let's try for an outstanding obligation. # Dec 2006", "return ExceptToken if tok == \"single\" : return SingleToken if tok == \"error\"", "if tokenClass( Token ) == ExceptToken : Token = six.next(tokenStream) condname = Token", "to fill in the remainder of the test ## obligations. ## ## NOTE:", "and error ## cases first, and ## * Not consider any test case", "a particular purpose are disclaimed. 
In no event shall the copyright owner or", "or profits; or business interruption) however caused and on any theory of liability,", "print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\",", "= vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format mismatch with initial suite", "= \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken =", "+ 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value + 1 if", "= ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------", "usage = \"\"\"Usage: # To read a specification (foo.cp) and print the test", "): return ((s1, v1), (s2, v2)) def reversePair( pair ): return ( pair[1],", "i, \"'\" + col + \"'\", \"not in specification\") in_schema_map.append(-1) for vec in", "more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\",", "ErrorToken return ValueToken # Generator to produce tokens, one by one # def", "in PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot ] : if", "name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ #", "error on \", Token, \" looking for 'category:'\") print_(\"Skipping to next category\") ##", "completeCase( columnOrder, testcase ) ) : Suite.append( testcase ) clearObligations( testcase ) else:", "values have not # been covered: python genpairs.py --csv --initial-suite tests.txt -o -v", "places # def identifySingles() : for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) ==", ": trvec = MakeTuple(len(CategoriesList)) for i 
in range(len(vec)) : if in_schema_map[i] != -1", "(conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations() :", "val = valDesc[0] ## The name of the value itself ## Postpone marking", "cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations() : if", "# covers the missing pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\"", "We should look at single and error ## cases first, and ## *", "in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase", "for an outstanding obligation. # Dec 2006 --- Let's look at all the", "in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding :", "INSTR_N_Comparisons = 0 # ---------- Read spec file using a simple LL parser", "Debug messages def dbg(*msg): parts = [ str(x) for x in msg ]", "= set() # All obligations, but only one direction ObsByCol = {} #", "NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token == EOF", "the same as above, and then produce a test suite that # covers", "by initial test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User", "six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if tokenClass( Token ) == PropToken", "solutions, smaller = faster ## Platform compatibility # ---------------------------------------- import six # Python", "with more than one ## single or error value (we don't know which", "this list of conditions and the following disclaimer in the documentation and/or other", "yield word Token = \"<PASSWORD>\" tokenStream = getToken() def parse(): global Token global", "testcase) return False # ------------------------------------------------------------ # Print Warnings (to stderr unless otherwise specified)", "---------------- ## Print the set of outstanding obligations. 
Typical use is when ##", "cut doesn't work if only one varying column -- for v2 in CategoriesValues[j]", "logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def dbg(*msg): parts =", "of (value, slot, condition) triples ValueExcepts = [ ] # List of (value,", "ExceptToken if tok == \"single\" : return SingleToken if tok == \"error\" :", "generators, the outer # loop will generate test cases, and the inner loops", "option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only", ": CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" )", "return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True # ---------------------------------------------------------", "kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif kind == \"error\"", "Consts for token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\"", "len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind =", "NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG", "] if tokenClass( Token ) != CategoryToken : print_(\"Syntax error on \", Token,", "default=True, dest=\"singles\", help = \"\"\"Do not produce test cases covering 'single' or 'error'", "only one direction ObsByCol = {} # Per column, both directions SingleColumns =", ") == IfToken : Token = six.next(tokenStream) ifcond = Token Token = six.next(tokenStream)", ": testCaseValue = 0 for i in range( len(testcase) ): for j in", "dest=\"singles\", help = \"\"\"Do not produce test cases covering 'single' or 'error' values.\"\"\")", "PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" 
) if UserOptions.singles : Suite = [ ] CreateSingles()", "= value + 1 if testcase[s2] != v2 : for ccol in range(", "of test cases (tests.txt) in CSV format, # plus a test specification, and", "CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList) )", "# --------------------------------------------------------- # # Is a given (slot,value) pair compatible with the test", "together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered", "If we couldn't score any more obligations, can we at least ## fill", "def identifySingles() : for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 :", "pair compatible with the test case so far? # def compatible( item, testcase", "# ------------------------------------ ## User arguments from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\")", "def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken :", "= ob if testcase[s1] != v1 : for ccol in range( len(testcase) ):", "len(ObsList) > 0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying", "derived from this software without specific prior written permission. 
This software is provided", "An item is a pair (slot number, value) # An itempair is a", "# ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if UserOptions.output_format == \"csv\" :", "binary form must reproduce the above copyright notice, this list of conditions and", "test suite (or several), and ## eliminate those obligations, so we are creating", "***Trying columns \", columnOrder, \" in \", testcase) # How shall we fill", "items must occur together in some case) # An exclusion is a pair", "schema CategoriesValues = [ ] ## List of value sets Singles = []", "six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions()", "import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging", "Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete,", "== 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes, so call", "Token == EOF : return [ ] if tokenClass( Token ) != CategoryToken", "\", Token, \" looking for 'category:'\") print_(\"Skipping to next category\") ## Error recovery", "print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation", "conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the", "Token ] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value +", "if tokenClass( Token ) != CategoryToken : print_(\"Syntax error on \", Token, \"", "and we assume special case processing ## may miss other features, including other", "An itempair is a pair (item, item), that is, ((slot, value), (slot, value))", "open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) ## First line should be schema", "We'll identify the multiples (non-single columns) as well, # because they are useful", "Token = six.next(tokenStream) return [(\"error\", None )] + parseConditions() if tokenClass( Token )", "at least ## fill in some compatible value and move on? dbg_p(\"#DBG ***", "or consequential damages (including, but not limited to, procurement of substitute goods or", "with \", testcase) return False # ------------------------------------------------------------ # Print Warnings (to stderr unless", "test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \",", "have an unfortunate ordering. # Outstanding is a set of all the obligations", "candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in candidates: (score, ((s1, v1),(s2,v2)))", "in CategoriesValues[j] : j_item = (j, v2) obforward = (i_item, j_item) obbackward =", "beginning of pairs generation, and # we can save space in output by", "range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation not in", "kind = nameOf(cond) condVal = valOf(cond) if kind == \"prop\" : CategoriesProps[ category", "): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s2,v2))", "any way out of the use of this software, even if advised of", "of these may coincide with # an existing element. 
We'll only consider *added*", "case is a list (array) with n columns # # Representations: # A", "special, exemplary, or consequential damages (including, but not limited to, procurement of substitute", "== PropToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return", "] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations() : if DBG:", "pair (item, item), that is, ((slot, value), (slot, value)) # An obligation is", "also updated lazily. # # Exclude is a dictionary mapping items to lists", "[ str(x) for x in msg ] msg_string = \" \".join(parts) Log.debug(msg_string) #", "initial test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options:", "DBGp = False ## Performance debugging, December 2006 maxCandidates = 50 ## Bigger", "so now we # set a limit (maxCandidates) on number of candidates considered", "tokenClass( tok ) : if tok == EOF : return EOFToken if tok.endswith(\":\")", "UserOptions.singles : Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)), \"Single and error vectors\"", "+ 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value + 1 candidates.append(", "exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format)", "in binary form must reproduce the above copyright notice, this list of conditions", "[ ] # List of (value, slot, condition) triples ## What we build", "# Map (slot,value) pair to list of condition names ValueIfs = [ ]", "return ( pair[1], pair[0] ) # Each item in the pair is a", "slot ] != DontCare and testcase[slot] != val) : return False for tslot", "# To read a specification (foo.cp) and print the test vector in human-readable", "as many test obligations as possible with each test case. # # Data", "is a set of all the obligations still outstanding. 
# ObsByCol is a", "# Let's try for an outstanding obligation. # Dec 2006 --- Let's look", "# an existing element. We'll only consider *added* value, # so we score", "old_v2 = testcase[ s2 ] testcase[ s2 ] = v2 if completeCase( columnOrder[1:]", "# -- Main processing: Parse the script, execute, print -- parse() identifySingles() makeExcludes()", "column, also updated lazily. # # Exclude is a dictionary mapping items to", "len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular value choices for \", CategoriesList[slot], \";", "Score the # Note one (but not both) of these may coincide with", "dbg(\"#DBG (parseSpec)\") if Token == EOF : return [ ] if tokenClass( Token", "List of its properties for cond in valDesc[1:] : kind = nameOf(cond) condVal", "= obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in", "----- def makePair( s1, v1, s2, v2 ): return ((s1, v1), (s2, v2))", "or \"error\" choice, i.e., for pairs # generation the value will be fixed.", "Token ) == ExceptToken : Token = six.next(tokenStream) condname = Token Token =", "tok == \"except\" : return ExceptToken if tok == \"single\" : return SingleToken", "source code must retain the above copyright notice, this list of conditions and", "cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [],", "schema in_schema = reader.next() in_schema_map = [ ] for i in range(len(in_schema)): col", "values : val = valDesc[0] ## The name of the value itself ##", "*new* parts only. value = 1 ## For at least meeting one obligation", "# always fixing these at the beginning of pairs generation, and # we", "makeObligations() : if DBG: print_(\"--- Creating obligations list ---\") keys = CategoriesList nslots", "all the obligations still outstanding. 
# ObsByCol is a dictionary obligations by column,", "= [ ] for i in range(len(in_schema)): col = in_schema[i] if col in", "cs_value in CategoriesValues[ conflict_slot ] : if cond not in ValueProps[ (conflict_slot, cs_value)", "parse(): global Token global NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def", "testcase[slot] = value if completeCase( columnOrder, testcase ) : Suite.append( testcase ) else:", "] = old_v2 ## If we couldn't score any more obligations, can we", "def getToken() : while 1: s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF", "single and error ## cases first, and ## * Not consider any test", "old_v1 = testcase[ s1 ] testcase[ s1 ] = v1 old_v2 = testcase[", "0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\"", "newList = [] for i in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation", "have not # been covered: python genpairs.py --csv --initial-suite tests.txt -o -v -p", "other materials provided with the distribution. 
* Neither the name of the University", "s.find(\"//\"); if commentPos >= 0 : s = s[0:commentPos] for word in s.split()", "and several other covering array generators, the outer # loop will generate test", "Compatible\", ob, testcase ) # Score the # Note one (but not both)", "] = [] ## List of its properties for cond in valDesc[1:] :", "but not limited to, the implied warranties of merchantability and fitness for a", "if completeCase( columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG *** Rolling back", "Unrecognized condition attribute:\", cond) if not singleton: vlist.append( val ) parseSpec() def parseValues():", "= \"\"\"Do not produce test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\",", "# All obligations, but only one direction ObsByCol = {} # Per column,", "clip from the end of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] =", "tok ) : if tok == EOF : return EOFToken if tok.endswith(\":\") :", "\", testcase) return False # ------------------------------------------------------------ # Print Warnings (to stderr unless otherwise", ": ValueIfs.append( (val, slotNum, condVal ) ) elif kind == \"except\" : ValueExcepts.append(", "the following disclaimer. * Redistributions in binary form must reproduce the above copyright", "parseConditions() if tokenClass( Token ) == IfToken : Token = six.next(tokenStream) ifcond =", "Token = six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if tokenClass( Token )", "of Oregon and <NAME> # All rights reserved. 
# License = \"\"\" (C)", "Columns with just one (non-error, non-single) choice MultipleColumns = [ ] # Complement", "elif kind == \"error\" or kind == \"single\" : Singles.append( (val, slotNum, kind)", "candidates.append( (value, ob) ) obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates:", "range(nslots): ObsByCol[i] = [] for i in MultipleColumns : for v1 in CategoriesValues[i]", "(parsed value: \", val, \")\") values.append( val ) return values def parseValue(): global", "len(keys) for i in range(nslots): ObsByCol[i] = [] for i in MultipleColumns :", "help=\"\"\"Report pairs not covered by initial test suites. (Useful only with --initial)\"\"\") (UserOptions,", "negligence or otherwise) arising in any way out of the use of this", "val, \")\") values.append( val ) return values def parseValue(): global Token dbg(\"#DBG (parseValue,", "> 0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns", "a test suite that # covers the missing pairs: python genpairs.py --csv --initial-suite", "\"\"\" # # An item is a pair (slot number, value) # An", "An obligation is a pair (the two items must occur together in some", "def makePair( s1, v1, s2, v2 ): return ((s1, v1), (s2, v2)) def", "tokenStream = getToken() def parse(): global Token global NCol Token = six.next(tokenStream) parseSpec()", ": ob = colObs[obindex] if not (ob in Outstanding or reversePair(ob) in Outstanding):", "direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to,", "quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help =", ": Token = six.next(tokenStream) return [(\"error\", None )] + parseConditions() if tokenClass( Token", "lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") 
optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and", "they may still participate in excludes.) # # We'll identify the multiples (non-single", "else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase )", "cases (tests.txt) in CSV format, # plus a test specification, and report which", "colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0], testcase) and", "\", testcase) return True dbg_p(\"#DBG * Attempting to complete\", testcase ) col =", "\"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [", "above copyright notice, this list of conditions and the following disclaimer in the", "val), (tslot, testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return", "filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \"", "and len(candidates) < maxCandidates : ob = colObs[obindex] if not (ob in Outstanding", "use in source and binary forms, with or without modification, are permitted provided", ") != CategoryToken : print_(\"Syntax error on \", Token, \" looking for 'category:'\")", "( i+1, len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j]) if ob in", "-p < foo.cp # To read the same as above, and then produce", "if tokenClass( Token ) == IfToken : Token = six.next(tokenStream) ifcond = Token", "mode, on (true) or off (false) DBGp = False ## Performance debugging, December", "in Outstanding: if (len(ObsList) == 0): return seedObligation = ObsList.pop() s1, v1 =", "+ 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in candidates: (score,", "csv_writer.writerow( t ) # ---------------- ## Read an initial test suite (or several),", "together in any case) # A case is a list (array) with n", "generate test cases, and the inner loops try to fulfill # as many", "Suite = [ ] 
## List of test cases ## Instrumentation INSTR_N_Comparisons =", "the names of its contributors may be used to endorse or promote products", "((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value + 1 candidates.append( (value, ob) )", "Success: \", testcase) return True dbg_p(\"#DBG * Attempting to complete\", testcase ) col", "): return tuple[1] # --------------- Build initial data structures ---- # Single columns", "## For now, we just assume that the initial test suite is not", "= len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token == EOF :", ") ) : Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning -", "= testcase[ s1 ] testcase[ s1 ] = v1 old_v2 = testcase[ s2", "val if completeCase( columnOrder[1:], testcase ): return True else: testcase[ col ] =", "= \"\"\" (C) 2007,2017 University of Oregon and <NAME>. All rights reserved. Redistribution", "Let's look at all the outstanding obligations # and choose the one with", "existing element. We'll only consider *added* value, # so we score the *new*", "action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test suites. (Useful only", "s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word)", "): return True else: dbg_p(\"#DBG *** Rolling back \", s1, s2) # Restore", ") if UserOptions.pairs : print_(\"=== Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\")", "or <name,value> pair def slotOf( tuple ): return tuple[0] def nameOf( tuple ):", "item is a pair (slot number, value) # An itempair is a pair", "of item. 
# import sys ## for file handling import random ## for", "know whether it is a singleton singleton = False ValueProps[ (slotNum, val) ]", ") return values def parseValue(): global Token dbg(\"#DBG (parseValue, looking at \", Token,", "+ col + \"'\", \"not in specification\") in_schema_map.append(-1) for vec in reader: if", "we at least ## fill in some compatible value and move on? dbg_p(\"#DBG", "ObsList complete, \", len(ObsList), \" obligations ---\") # When we complete a test", "def CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle( single ): testcase =", "source and binary forms, with or without modification, are permitted provided that the", "\"<SINGLE>\" EOFToken = EOF def tokenClass( tok ) : if tok == EOF", "for cs_value in CategoriesValues[ conflict_slot ] : if cond in ValueProps[ (conflict_slot, cs_value)", "candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in candidates: (score, ((s1, v1),(s2,v2))) =", "of outstanding obligations. 
Typical use is when ## we are trying to see", "following conditions are met: * Redistributions of source code must retain the above", "== \"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title ) def PrintAsText(", "if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value \",", "== DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def", "\" test vectors\") print_(\"\") for slot in columns : parm = CategoriesList[ slot", "parseConditions() if tokenClass( Token ) == ExceptToken : Token = six.next(tokenStream) condname =", "kind) testcase[slot] = value if completeCase( columnOrder, testcase ) : Suite.append( testcase )", "six import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__)", "(parseConditions)\") if tokenClass( Token ) == ErrorToken : Token = six.next(tokenStream) return [(\"error\",", "our lazy deletion of obligations; we # clip from the end of the", "property ## until we know whether it is a singleton singleton = False", "any theory of liability, whether in contract, strict liability, or tort (including negligence", "building excludes PropsSlots = { } # For each property name, set of", ") elif kind == \"error\" or kind == \"single\" : Singles.append( (val, slotNum,", "# listed as a \"single\" or \"error\" choice, i.e., for pairs # generation", "tuple ): return tuple[1] # --------------- Build initial data structures ---- # Single", "= EOF def tokenClass( tok ) : if tok == EOF : return", "\", CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot)", "val, slot, cond = IfCond for conflict_slot in PropsSlots[ cond ] : for", "ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList) == 0): return 
seedObligation =", "columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG", "candidates = [ ] obindex = 0 while obindex < len(colObs) and len(candidates)", "of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then", "required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) >", "print_(\"Syntax error on \", Token, \" looking for 'category:'\") print_(\"Skipping to next category\")", "for i in range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i]", "the following conditions are met: * Redistributions of source code must retain the", "Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes that come from \"if\" clauses", "0): return seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2, v2 = seedObligation[1]", "= \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken =", "is a tuple # # Like AETG and several other covering array generators,", "\" looking for 'category:'\") print_(\"Skipping to next category\") ## Error recovery to next", "value + 1 candidates.append( (value, ob) ) obindex = obindex + 1 candidates.sort()", "print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite : for", "\"if\" : return IfToken if tok == \"prop\" : return PropToken if tok", "Value \", testCaseValue, testcase ) # --------------------------------------------------------- # # Is a given (slot,value)", "is a list of obligations, some of which may # already have been", "# and choose the one with highest score. 
This is fairly expensive #", "3 compatibility from six import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING)", "properties for cond in valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond) if", "t = MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2", "List of value sets Singles = [] ## List of (slot,value,kind) where kind", "two items must occur together in some case) # An exclusion is a", "] # List of (value, slot, condition) triples ## What we build Suite", "## not singletons. We should look at single and error ## cases first,", "len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) ) : Suite.append(", "move on? dbg_p(\"#DBG *** Trying any value, regardless of obligation\") for val in", "i_item = (i, v1) for j in range(i+1,nslots) : ## if j in", "possible value of the property ## until we know whether it is a", "for ExceptCond in ValueExcepts : val, slot, cond = ExceptCond for conflict_slot in", "i_item) if obforward not in Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward)", "initial test suite (in csv format). Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\",", "UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") #", "makeExcludes before # calling makeObligations # def makeExcludes() : # Excludes that come", "for ob in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 =", "specification (foo.cp) and print the test vector in human-readable # format: python genpairs.py", "\"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test suites. 
(Useful", "above) ## ------------------------------------------------------------ # -- Respond to special diagnostic options -- if UserOptions.license:", "yet? (No.) ## First line should be schema in_schema = reader.next() in_schema_map =", "outer # loop will generate test cases, and the inner loops try to", "PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for import into a spreadsheet or", "a suite of special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True ## Seems", "def nameOf( tuple ): return tuple[0] def valOf( tuple ): return tuple[1] #", "i.e., for pairs # generation the value will be fixed. We can save", ") def CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle( single ): testcase", "len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG", "obligations, ## not singletons. We should look at single and error ## cases", "the one with highest score. This is fairly expensive # (10^20 takes about", "slot, kind = single dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot] =", "for 'category:'\") print_(\"Skipping to next category\") ## Error recovery to next category while", "< foo.cp \"\"\" # # An item is a pair (slot number, value)", "\", val, \")\") values.append( val ) return values def parseValue(): global Token dbg(\"#DBG", "return False for tslot in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in", "a warning or error message concerning a particular partially-defined test vector\"\"\" print_( \"{}", "## Constants (other than tokens for parsing) DontCare = \"_\" ## Configuration parameters", "optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\",", "===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : 
while len(ObsList) > 0 : CreateCase()", "the property ## until we know whether it is a singleton singleton =", "{ } # Map (slot,value) pair to list of condition names ValueIfs =", ") csv_writer.writerow( t ) # ---------------- ## Read an initial test suite (or", "# # Like AETG and several other covering array generators, the outer #", ": # Excludes that come from \"except\" clauses for ExceptCond in ValueExcepts :", "some case) # An exclusion is a pair (the two items must not", "is a pair (slot number, value) # An itempair is a pair (item,", "help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\",", "excludes PropsSlots = { } # For each property name, set of slots", "For each property name, set of slots with it CategoriesProps = { }", "Token = tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1] Token = six.next(tokenStream)", "## List of (slot,value,kind) where kind is \"single\" or \"error\" Excludes = set()", "vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest)", "\", slot, value, kind) testcase[slot] = value if completeCase( columnOrder, testcase ) :", ": j_item = (j, v2) obforward = (i_item, j_item) obbackward = (j_item, i_item)", "if ( completeCase( columnOrder, testcase ) ) : Suite.append( testcase ) clearObligations( testcase", "following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice,", "= OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\", default=False,", ": Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"prop\" ,", "x in msg ] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug messages", "s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2]", "compatible value and move on? dbg_p(\"#DBG *** Trying any value, regardless of obligation\")", "An item is a tuple, and an itempair is a tuple # #", "fulfill # as many test obligations as possible with each test case. #", "if UserOptions.singles : Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)), \"Single and error", "seeded with\", testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase(", "seedObligation not in Outstanding: if (len(ObsList) == 0): return seedObligation = ObsList.pop() s1,", "is not ## a suite of special and error cases. 
## class csv_dialect(csv.excel):", "compatibility # ---------------------------------------- import six # Python 2 and 3 compatibility from six", "---\") keys = CategoriesList nslots = len(keys) for i in range(nslots): ObsByCol[i] =", "(other than tokens for parsing) DontCare = \"_\" ## Configuration parameters DBG =", "## The name of the value itself ## Postpone marking val as a", ": if cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val,", "print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the script, execute, print --", "in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\",", "to, the implied warranties of merchantability and fitness for a particular purpose are", "value)) # An obligation is a pair (the two items must occur together", "print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob )", "## ------------------------------------------------------------ # -- Respond to special diagnostic options -- if UserOptions.license: print_(License)", "value = [ Token ] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue", "error cases. 
## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect def", "(conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes that", "ob ) : \"\"\"Convert obligation to vector for debugging messages\"\"\" t = MakeTuple(", "value choices for \", CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) ==", "== 0 : print_(\"Warning: No non-singular value choices for \", CategoriesList[slot], \"; Pairs", "of all the obligations still outstanding. # ObsByCol is a dictionary obligations by", "from six import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log =", "symmetric) ObsList = [ ] # All obligations, but only one direction Outstanding", "in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True", "while len(ObsList) > 0 : CreateCase() if UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage,", "Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"prop\" , condname)]", "in any way out of the use of this software, even if advised", "User arguments from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print", "nameOf(cond) condVal = valOf(cond) if kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[", "range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value + 1", "} # For each property name, set of slots with it CategoriesProps =", "testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \" in \", testcase) # How", "= 0 while obindex < len(colObs) and len(candidates) < maxCandidates : ob =", "\"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass( tok ) : if", "obligation: \", slot, value, kind) testcase[slot] = value if 
completeCase( columnOrder, testcase )", "= six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more conditions\") return", "Temporary, for building excludes PropsSlots = { } # For each property name,", "Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken : Token = six.next(tokenStream)", "\" \".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg) #", "(foo.cp) and print the test vector in human-readable # format: python genpairs.py <", "# Data structures: # We will record obligations in three different data structures,", "messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False,", "category names (in order given) ## The CategoriesList can also be considered the", "pair to list of condition names ValueIfs = [ ] # List of", "name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM", "the script, execute, print -- parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite", "limit (maxCandidates) on number of candidates considered colObs = ObsByCol[col] candidates = [", "the documentation and/or other materials provided with the distribution. * Neither the name", "v2)) def reversePair( pair ): return ( pair[1], pair[0] ) # Each item", "dictionary mapping items to lists of item. 
# import sys ## for file", "descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\") for slot in", "vectors as comma-separated values, for import into a spreadsheet or other CSV-consuming application.", ": for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No", "str(x) for x in msg ] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance", "item), that is, ((slot, value), (slot, value)) # An obligation is a pair", "len ): newList = [] for i in range(0,len): newList.append(DontCare) return newList def", "special case processing ## may miss other features, including other special cases) ##", "trvec ) else: print_(\"*** Warning, format mismatch with initial suite \", initial_suite) print_(\"***", "'category:'\") print_(\"Skipping to next category\") ## Error recovery to next category while tokenClass(", "<<%s: %s>>\" % ( word, tokenClass(word) ) ) yield word Token = \"<PASSWORD>\"", ": PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val, slotNum,", "in CategoriesValues[ conflict_slot ] : if cond in ValueProps[ (conflict_slot, cs_value) ] :", "while tokenClass( Token ) == ValueToken : val = parseValue() dbg(\"#DBG (parsed value:", "must reproduce the above copyright notice, this list of conditions and the following", "\", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\",", "Token dbg(\"#DBG (parseValue, looking at \", Token, \")\") if tokenClass( Token ) !=", "coincide with # an existing element. 
We'll only consider *added* value, # so", "logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages def dbg(*msg): parts = [", ", condname)] + parseConditions() if tokenClass( Token ) == ExceptToken : Token =", "an unfortunate ordering. # Outstanding is a set of all the obligations still", "python genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp # To read", "Token Token = six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if tokenClass( Token", "genpairs with the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\",", "global Token dbg(\"#DBG (parseValue, looking at \", Token, \")\") if tokenClass( Token )", "## single or error value (we don't know which will be handled ##", "if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec)) :", "------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if UserOptions.output_format == \"csv\" : PrintAsCSV(", "tokenClass( Token ) != ValueToken : print_(\"Syntax error, expecting value, saw \", Token", "## we are trying to see what is missing in an initial test", "---- # Consts for token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\" ValueToken", "values def parseValue(): global Token dbg(\"#DBG (parseValue, looking at \", Token, \")\") if", "lazily, when we bring up an obligation. # def clearObligations(testcase) : testCaseValue =", "(ob in Outstanding or reversePair(ob) in Outstanding): # Here is our lazy deletion", "# Outstanding is a set of all the obligations still outstanding. # ObsByCol", "the implied warranties of merchantability and fitness for a particular purpose are disclaimed.", "up an obligation. 
# def clearObligations(testcase) : testCaseValue = 0 for i in", "fill column \", col , \" with \", testcase) return False # ------------------------------------------------------------", "Excludes: return False return True # --------------------------------------------------------- def MakeTuple ( len ): newList", "if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"***", "def clearObligations(testcase) : testCaseValue = 0 for i in range( len(testcase) ): for", "= (i, v1) for j in range(i+1,nslots) : ## if j in SingleColumns:", "one direction Outstanding = set() # All obligations, but only one direction ObsByCol", "Print the set of outstanding obligations. Typical use is when ## we are", "Attempting tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if", "= CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \", i,", "= set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val, slotNum, condVal )", ": return EOFToken if tok.endswith(\":\") : return CategoryToken if tok == \"if\" :", "if tok.endswith(\":\") : return CategoryToken if tok == \"if\" : return IfToken if", "+ \"'\", \"not in specification\") in_schema_map.append(-1) for vec in reader: if len(vec) ==", "in_schema_map = [ ] for i in range(len(in_schema)): col = in_schema[i] if col", "as well, # because they are useful in several places # def identifySingles()", "global NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token", "print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing:", "tok == \"if\" : return IfToken if tok == \"prop\" : return PropToken", "as 
possible with each test case. # # Data structures: # We will", "(maxCandidates) on number of candidates considered colObs = ObsByCol[col] candidates = [ ]", ") def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test vectors\")", "columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row", "will record obligations in three different data structures, # for different forms of", "dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User arguments from optparse import OptionParser", "of merchantability and fitness for a particular purpose are disclaimed. In no event", "obligations in three different data structures, # for different forms of quick access:", "each property name, set of slots with it CategoriesProps = { } #", "with the distribution. * Neither the name of the University of Oregon nor", "print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\") for slot in columns :", "forms of quick access: # ObsList is a list of obligations, some of", "test suites ## Constants (other than tokens for parsing) DontCare = \"_\" ##", "sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos = s.find(\"//\");", "obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in candidates:", "updated lazily. 
# # Exclude is a dictionary mapping items to lists of", "testcase[s2] != v2 : for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in", "cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes that come", "partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector))", "contributors \"as is\" and any express or implied warranties, including, but not limited", "and the following disclaimer in the documentation and/or other materials provided with the", "compatibility from six import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log", "CategoriesValues[ col ] : if compatible((col,val), testcase) : testcase[ col ] = val", "set of outstanding obligations. Typical use is when ## we are trying to", "(non-error, non-single) choice MultipleColumns = [ ] # Complement of SingleColumns -- pairs", "test case. # # Data structures: # We will record obligations in three", "in range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec", "notice, this list of conditions and the following disclaimer in the documentation and/or", "].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind", "csv ## for reading and writing test suites ## Constants (other than tokens", "] # -------------- The form of a pair (obligation or exclusion) ----- def", "out of the use of this software, even if advised of the possibility", "Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return", "Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True #", "clearObligations( testcase ) else: CaseMessage( \"Warning - No pair possible: \", testcase )", "case schema CategoriesValues = [ ] ## List of value sets Singles =", 
"obforward not in Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i", "\"\"\"Do not produce test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\",", "its contributors may be used to endorse or promote products derived from this", "name of the value itself ## Postpone marking val as a possible value", "val, conflict_slot, cs_value)) # Excludes that come from \"if\" clauses --- reverse sense", "in Outstanding : value = value + 1 if testcase[s2] != v2 :", "tokenClass( Token ) == EOF : print_(\"Discarding rest of file\") return [ ]", "for IfCond in ValueIfs : val, slot, cond = IfCond for conflict_slot in", "then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help", "been fulfilled (deletion is lazy). We may scramble # this list so we", "(slot number, value) # An itempair is a pair (item, item), that is,", "# Is a given (slot,value) pair compatible with the test case so far?", "\", testcase ) def CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle( single", "\"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for import into", ": if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else:", "elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes,", "+ 1 if testcase[s2] != v2 : for ccol in range( len(testcase) ):", "((slot, value), (slot, value)) # An obligation is a pair (the two items", "Token global NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global", "faster ## Platform compatibility # ---------------------------------------- import six # Python 2 and 3", "\"<VAL>\" IfToken = \"<IF>\" 
PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\"", "and ## eliminate those obligations, so we are creating ## a test suite", "above, and then produce a test suite that # covers the missing pairs:", "\"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot)", "elif kind == \"if\" : ValueIfs.append( (val, slotNum, condVal ) ) elif kind", "## First line should be schema in_schema = reader.next() in_schema_map = [ ]", "of the use of this software, even if advised of the possibility of", "six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass( Token ) == IfToken :", "clauses for ExceptCond in ValueExcepts : val, slot, cond = ExceptCond for conflict_slot", "fixed. We can save some time by # always fixing these at the", "len(ObsList), \" obligations ---\") # When we complete a test case, we remove", "if not (ob in Outstanding or reversePair(ob) in Outstanding): # Here is our", "DBGp: dbg(*msg) # ------------------------------------ ## User arguments from optparse import OptionParser optparser =", "MultipleColumns.append(slot) # Obligations depend on excludes, so call makeExcludes before # calling makeObligations", "are from these NCol = 0 # ==len(CategoriesList), set after parsing ## Temporary,", "return True else: testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing to fill", "foo.cp # To read a partial suite of test cases (tests.txt) in CSV", "(deletion is lazy). 
We may scramble # this list so we don't have", "] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for valDesc in values :", "non-singular value choices for \", CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot])", "completeCase( columnOrder, testcase ) : if len (columnOrder) == 0 : dbg_p(\"#DBG: ***", "list, indexed by column (category) # A test suite is a list of", "category, \" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category ) vlist =", "testcase ) ) : Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning", "use, data, or profits; or business interruption) however caused and on any theory", "None )] + parseConditions() if tokenClass( Token ) == SingleToken : Token =", "= value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value +", "matches = False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.)", "six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if tokenClass( Token ) == ExceptToken", "return tuple[0] def valOf( tuple ): return tuple[1] # --------------- Build initial data", "interruption) however caused and on any theory of liability, whether in contract, strict", "parts = [ str(x) for x in msg ] msg_string = \" \".join(parts)", "on any theory of liability, whether in contract, strict liability, or tort (including", "nameOf( tuple ): return tuple[0] def valOf( tuple ): return tuple[1] # ---------------", "ifcond = Token Token = six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if", "stderr unless otherwise specified) # ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) :", "[ ] for i in range(len(in_schema)): col = in_schema[i] if col in CategoriesList:", "with the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", 
action=\"store_true\", default=False,", "descriptive_title ) : if UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else: PrintAsText(", "from these NCol = 0 # ==len(CategoriesList), set after parsing ## Temporary, for", "one with highest score. This is fairly expensive # (10^20 takes about 9", "unfortunate ordering. # Outstanding is a set of all the obligations still outstanding.", "slot in columns : parm = CategoriesList[ slot ] print_(\"%15s\" % parm ,", "Currently considering only pair obligations, ## not singletons. We should look at single", "dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test suites. (Useful only with --initial)\"\"\")", "else: testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing to fill column \",", "candidates) for cand in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[", "script, execute, print -- parse() identifySingles() makeExcludes() makeObligations() for suite in UserOptions.initial_suite :", "and move on? dbg_p(\"#DBG *** Trying any value, regardless of obligation\") for val", "including other special cases) ## * Not consider any pairs as being satisfied", "and on any theory of liability, whether in contract, strict liability, or tort", "conditions and the following disclaimer in the documentation and/or other materials provided with", "a test case, we remove obligations from # the outstanding obligations list. The", "test specification, and report which pairs of values have not # been covered:", "set after parsing ## Temporary, for building excludes PropsSlots = { } #", "dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score the # Note one (but", "slotNum = len(CategoriesList) CategoriesList.append( category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category", "= csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ] for slot in columns", "suite. 
## def print_required_pairs( ) : for ob in Outstanding : s1, v1", "of its properties for cond in valDesc[1:] : kind = nameOf(cond) condVal =", "number, value) # An itempair is a pair (item, item), that is, ((slot,", "SingleColumns.append(slot) else: MultipleColumns.append(slot) # Obligations depend on excludes, so call makeExcludes before #", "return t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title", "effect def initial_suite_clear( initial_suite ) : matches = False reader = csv.reader( open(initial_suite,", "in Suite : dbg(\"write row \" , t ) csv_writer.writerow( t ) #", "t ) # ---------------- ## Read an initial test suite (or several), and", "ObsByCol[i] = [] for i in MultipleColumns : for v1 in CategoriesValues[i] :", "import sys ## for file handling import random ## for shuffling lists import", "Restore previous values testcase[ s1 ] = old_v1 testcase[ s2 ] = old_v2", "def parseValue(): global Token dbg(\"#DBG (parseValue, looking at \", Token, \")\") if tokenClass(", "case processing ## may miss other features, including other special cases) ## *", "= [ ] # Complement of SingleColumns -- pairs are from these NCol", "seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase =", "expecting value, saw \", Token ) return [ \"--bogus--\"] value = [ Token", ") ) random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG single obligation: \", slot,", "a <slot,value> or <name,value> pair def slotOf( tuple ): return tuple[0] def nameOf(", "This software is provided by the copyright holders and contributors \"as is\" and", "products derived from this software without specific prior written permission. 
This software is", "is a tuple, and an itempair is a tuple # # Like AETG", "in range(nslots): ObsByCol[i] = [] for i in MultipleColumns : for v1 in", "# as many test obligations as possible with each test case. # #", "= 0 # ---------- Read spec file using a simple LL parser ----", "only. value = 1 ## For at least meeting one obligation ((s1, v1),", "completeCase( columnOrder[1:], testcase ): return True else: testcase[ col ] = DontCare dbg_p(\"#DBG", "if (len(ObsList) == 0): return seedObligation = ObsList.pop() s1, v1 = seedObligation[0] s2,", "non-single) choice MultipleColumns = [ ] # Complement of SingleColumns -- pairs are", "= [] for i in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation =", "fill in some compatible value and move on? dbg_p(\"#DBG *** Trying any value,", "+ conditions) return value + conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if", "# been covered: python genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp", "+ parseConditions() if tokenClass( Token ) == SingleToken : Token = six.next(tokenStream) return", "= testcase[ s2 ] testcase[ s2 ] = v2 if completeCase( columnOrder[1:] ,", "CategoriesList.append( category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [", "more conditions\") return [ ] # -------------- The form of a pair (obligation", "\", Token, \")\") if tokenClass( Token ) != ValueToken : print_(\"Syntax error, expecting", "1: s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return", "1 ] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\",", "== EOF : print_(\"Discarding rest of file\") return [ ] Token = tokenStream.next()", "[ ] # Columns with just one (non-error, non-single) choice MultipleColumns = [", "conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken", "def 
makeObligations() : if DBG: print_(\"--- Creating obligations list ---\") keys = CategoriesList", "MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2 for slot in SingleColumns", "slot in columns : value = t[slot] print_(\"%15s\" % value , end=\"\") print_(", "[(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more conditions\") return [ ] #", "ob) ) obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates)", "= \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken =", "in columns : value = t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\"", "\"<EOF>\" CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\"", "particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in", ": value = t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\" ) print_(", "of Oregon nor the names of its contributors may be used to endorse", "val ) ].append(condVal) if condVal not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum)", "DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [ ] ## List of", "if UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title )", "with the test case so far? 
# def compatible( item, testcase ) :", "a list of obligations, some of which may # already have been fulfilled", "msg, vector, dest=sys.stderr ) : \"\"\"Print a warning or error message concerning a", "else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\",", "%s>>\" % ( word, tokenClass(word) ) ) yield word Token = \"<PASSWORD>\" tokenStream", "# ---------- Read spec file using a simple LL parser ---- # Consts", "test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default =", "compatible with the test case so far? # def compatible( item, testcase )", "value (we don't know which will be handled ## by the application, and", "a list, indexed by column (category) # A test suite is a list", "slotNum, kind) ) singleton = True else : print_(\"*ERR* Unrecognized condition attribute:\", cond)", ": Suite.append( testcase ) else: CaseMessage( \"Warning - No pair possible: \", testcase", "testcase ): return True else: testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing", "pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" # # An item", "different data structures, # for different forms of quick access: # ObsList is", "with highest score. 
This is fairly expensive # (10^20 takes about 9 minutes", "!= CategoryToken : if tokenClass( Token ) == EOF : print_(\"Discarding rest of", "Outstanding or reversePair(ob) in Outstanding): # Here is our lazy deletion of obligations;", "False ## Debugging mode, on (true) or off (false) DBGp = False ##", "s = s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s: %s>>\" % (", "In no event shall the copyright owner or contributors be liable for any", "a specification (foo.cp) and print the test vector in human-readable # format: python", "dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output format", "return EOFToken if tok.endswith(\":\") : return CategoryToken if tok == \"if\" : return", "not covered by initial test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs) =", "--------------------------------------------------------- # # Is a given (slot,value) pair compatible with the test case", "set of all the obligations still outstanding. # ObsByCol is a dictionary obligations", "(including negligence or otherwise) arising in any way out of the use of", "a possible value of the property ## until we know whether it is", "still participate in excludes.) 
# # We'll identify the multiples (non-single columns) as", "single or error value (we don't know which will be handled ## by", "True else: testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing to fill column", "= [ ] # List of (value, slot, condition) triples ValueExcepts = [", "# ==len(CategoriesList), set after parsing ## Temporary, for building excludes PropsSlots = {", "specification\") in_schema_map.append(-1) for vec in reader: if len(vec) == len(in_schema) : trvec =", "suite in UserOptions.initial_suite : initial_suite_clear( suite ) if UserOptions.pairs : print_(\"=== Pairs required", "fairly expensive # (10^20 takes about 9 minutes wall time on G4 laptop),", "\"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal not", "colObs = ObsByCol[col] candidates = [ ] obindex = 0 while obindex <", "# (c) 2007 University of Oregon and <NAME> # All rights reserved. #", "the copyright holders and contributors \"as is\" and any express or implied warranties,", "return [(\"prop\" , condname)] + parseConditions() if tokenClass( Token ) == ExceptToken :", "least meeting one obligation ((s1, v1), (s2, v2)) = ob if testcase[s1] !=", "into a spreadsheet or other CSV-consuming application. \"\"\" dbg(\"Print as CSV\") csv_writer =", "written permission. 
This software is provided by the copyright holders and contributors \"as", "Outstanding : value = value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value", "((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True # --------------------------------------------------------- def MakeTuple (", "print_(\"*** Expecting columns \", in_schema , \" but saw \", vec) # ----------------", "profits; or business interruption) however caused and on any theory of liability, whether", "access: # ObsList is a list of obligations, some of which may #", "values ValueProps = { } # Map (slot,value) pair to list of condition", "= MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2 for slot in", "in which all but one value is # listed as a \"single\" or", ": \"\"\"Convert obligation to vector for debugging messages\"\"\" t = MakeTuple( NCol )", "## List of value sets Singles = [] ## List of (slot,value,kind) where", "Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else: MultipleColumns.append(slot) #", "[ ] for valDesc in values : val = valDesc[0] ## The name", "The name of the value itself ## Postpone marking val as a possible", "# def compatible( item, testcase ) : slot, val = item if (", "] # List of (value, slot, condition) triples ValueExcepts = [ ] #", "## Platform compatibility # ---------------------------------------- import six # Python 2 and 3 compatibility", "test suite (in csv format). 
Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\",", "from\" , Token) category = Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG", "end=\"\", file=dest) sep=\"\" for col in range(len(vector)) : if vector[col] == DontCare :", "[ ] # Complement of SingleColumns -- pairs are from these NCol =", "\" but saw \", vec) # ---------------- ## Print the set of outstanding", "parseConditions() dbg(\"#DBG No more conditions\") return [ ] # -------------- The form of", "conflict_slot ] : if cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair(", "= old_v2 ## If we couldn't score any more obligations, can we at", "is # listed as a \"single\" or \"error\" choice, i.e., for pairs #", "ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print results", "## The CategoriesList can also be considered the test case schema CategoriesValues =", ") csv_writer.writerow(schema_row) for t in Suite : dbg(\"write row \" , t )", "cond ] : for cs_value in CategoriesValues[ conflict_slot ] : if cond in", "Outstanding : value = value + 1 candidates.append( (value, ob) ) obindex =", "# How shall we fill this DontCare with something useful? # Let's try", "must occur together in some case) # An exclusion is a pair (the", ": Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"---", "!= v1 : for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding", "obligations, but only one direction Outstanding = set() # All obligations, but only", ": if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\",", "pair obligations, ## not singletons. 
We should look at single and error ##", ", \" but saw \", vec) # ---------------- ## Print the set of", "------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ # -- Respond to", "for i in range(len(in_schema)): col = in_schema[i] if col in CategoriesList: to_col =", "= Token Token = six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if tokenClass(", "] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if", ": ValueExcepts.append( (val, slotNum, condVal) ) elif kind == \"error\" or kind ==", "at all the outstanding obligations # and choose the one with highest score.", "reversePair(ob) in Outstanding): # Here is our lazy deletion of obligations; we #", "space in output by suppressing them. # (Note they may still participate in", "[] for i in MultipleColumns : for v1 in CategoriesValues[i] : i_item =", "## If we couldn't score any more obligations, can we at least ##", "end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite : for slot in columns :", "these NCol = 0 # ==len(CategoriesList), set after parsing ## Temporary, for building", "v1 : for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding :", "OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\",", "data structures, # for different forms of quick access: # ObsList is a", "print_required_pairs( ) : for ob in Outstanding : s1, v1 = ob[0] name1=CategoriesList[s1]", "v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1,", "Singles: CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder =", "ob = colObs[obindex] if not (ob in Outstanding or reversePair(ob) in Outstanding): 
#", "ValueIfs.append( (val, slotNum, condVal ) ) elif kind == \"except\" : ValueExcepts.append( (val,", "global Token dbg(\"#DBG (parseSpec)\") if Token == EOF : return [ ] if", "] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes that come from", "All rights reserved. # License = \"\"\" (C) 2007,2017 University of Oregon and", "CategoriesList = [ ] ## List of category names (in order given) ##", "direction ObsByCol = {} # Per column, both directions SingleColumns = [ ]", "col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \", col,", "but only one direction Outstanding = set() # All obligations, but only one", "which all but one value is # listed as a \"single\" or \"error\"", "or other CSV-consuming application. \"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel", "with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by", "special diagnostic options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in", "dbg(*msg): parts = [ str(x) for x in msg ] msg_string = \"", ": if ((slot, val), (tslot, testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val))", "CategoryToken = \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken", "(parseSpec)\") if Token == EOF : return [ ] if tokenClass( Token )", "direction Outstanding = set() # All obligations, but only one direction ObsByCol =", "0 : s = s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s: %s>>\"", "= ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print", "outstanding obligations. 
Typical use is when ## we are trying to see what", "minutes wall time on G4 laptop), so now we # set a limit", "0 # ---------- Read spec file using a simple LL parser ---- #", "or without modification, are permitted provided that the following conditions are met: *", "features, including other special cases) ## * Not consider any pairs as being", "still outstanding. # ObsByCol is a dictionary obligations by column, also updated lazily.", "while obindex < len(colObs) and len(candidates) < maxCandidates : ob = colObs[obindex] if", ": if tokenClass( Token ) == EOF : print_(\"Discarding rest of file\") return", "Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase )", "= parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append( val ) return values", "Error recovery to next category while tokenClass( Token ) != CategoryToken : if", "test case is represented as a list, indexed by column (category) # A", "deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0], testcase)", "= [] ## List of (slot,value,kind) where kind is \"single\" or \"error\" Excludes", "< foo.cp # To read a partial suite of test cases (tests.txt) in", "\"\"\" (C) 2007,2017 University of Oregon and <NAME>. All rights reserved. 
Redistribution and", "= (j_item, i_item) if obforward not in Excludes and obbackward not in Excludes:", "action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\",", ") else: CaseMessage( \"Warning - No pair possible: \", testcase ) def CreateSingles():", "set of slots with it CategoriesProps = { } # For each category,", "kind == \"if\" : ValueIfs.append( (val, slotNum, condVal ) ) elif kind ==", "as input to Excel and other spreadsheets, genpairs with the -i option, and", "in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) #", "conditions are met: * Redistributions of source code must retain the above copyright", "single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) ) )", "Outstanding = set() # All obligations, but only one direction ObsByCol = {}", "Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ##", "= val if completeCase( columnOrder[1:], testcase ): return True else: testcase[ col ]", "row \" , t ) csv_writer.writerow( t ) # ---------------- ## Read an", "special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no", "= \"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken =", "Respond to special diagnostic options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\")", "s1, v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) )", "we don't have an unfortunate ordering. 
# Outstanding is a set of all", "column, both directions SingleColumns = [ ] # Columns with just one (non-error,", "Parsed: \", category, \" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category )", "+ 1 dbg(\"*** Value \", testCaseValue, testcase ) # --------------------------------------------------------- # # Is", "something useful? # Let's try for an outstanding obligation. # Dec 2006 ---", "tuple, and an itempair is a tuple # # Like AETG and several", "s1, s2) # Restore previous values testcase[ s1 ] = old_v1 testcase[ s2", "return PropToken if tok == \"except\" : return ExceptToken if tok == \"single\"", "\"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output format is comma-separated-values", "each category, all props on any values ValueProps = { } # Map", ") print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) > 0 : CreateCase() if", "\"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector)) : if vector[col] ==", "ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN", "and the following disclaimer. 
* Redistributions in binary form must reproduce the above", "however caused and on any theory of liability, whether in contract, strict liability,", "compatible( item, testcase ) : slot, val = item if ( testcase[ slot", "Seems to have no effect def initial_suite_clear( initial_suite ) : matches = False", "singleton = True else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton:", "vector, dest=sys.stderr ) : \"\"\"Print a warning or error message concerning a particular", "= faster ## Platform compatibility # ---------------------------------------- import six # Python 2 and", "options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data", "clearObligations( trvec ) else: print_(\"*** Warning, format mismatch with initial suite \", initial_suite)", "of the property ## until we know whether it is a singleton singleton", "Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass( Token ) ==", "columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\") for slot", "item is a tuple, and an itempair is a tuple # # Like", "item. 
# import sys ## for file handling import random ## for shuffling", "elif kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif kind ==", "values.append( val ) return values def parseValue(): global Token dbg(\"#DBG (parseValue, looking at", "Excel and other spreadsheets, genpairs with the -i option, and some other programs).\"\"\")", ": return SingleToken if tok == \"error\" : return ErrorToken return ValueToken #", "obligation is a pair (the two items must occur together in some case)", "if kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal)", "% parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite : for slot", ": value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value =", "ValueToken # Generator to produce tokens, one by one # def getToken() :", "contributors may be used to endorse or promote products derived from this software", "is a <slot,value> or <name,value> pair def slotOf( tuple ): return tuple[0] def", "val ) parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [ ]", "## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) # Debug messages", "{ } # For each property name, set of slots with it CategoriesProps", "--initial-suite tests.txt -o -v -p < foo.cp # To read the same as", "< maxCandidates : ob = colObs[obindex] if not (ob in Outstanding or reversePair(ob)", "v1 old_v2 = testcase[ s2 ] testcase[ s2 ] = v2 if completeCase(", "-i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\", action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include", "value = value + 1 if testcase[s2] != v2 : for ccol in", "of test cases ## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec file", "case. 
## For now, we just assume that the initial test suite is", "or services; loss of use, data, or profits; or business interruption) however caused", "value is # listed as a \"single\" or \"error\" choice, i.e., for pairs", "if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value + 1 candidates.append( (value, ob)", "= parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \", values) slotNum = len(CategoriesList)", "## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect def initial_suite_clear( initial_suite", "== ExceptToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return", "not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos = s.find(\"//\"); if commentPos", "# A case is a list (array) with n columns # # Representations:", "(j_item, i_item) if obforward not in Excludes and obbackward not in Excludes: ObsList.append(obforward)", "ValueExcepts = [ ] # List of (value, slot, condition) triples ## What", "vector in human-readable # format: python genpairs.py < foo.cp # To read a", "1 candidates.sort() candidates.reverse() dbg_p(\"### Candidates: \", candidates) for cand in candidates: (score, ((s1,", "because they are useful in several places # def identifySingles() : for slot", ": PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title", "of category names (in order given) ## The CategoriesList can also be considered", "Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList)", "slot, val = item if ( testcase[ slot ] != DontCare and testcase[slot]", "(tests.txt) in CSV format, # plus a test specification, and report which pairs", "debugging, December 2006 maxCandidates = 50 ## Bigger = better solutions, smaller =", "## by the application, and we assume special case processing ## may miss", "have been fulfilled (deletion is 
lazy). We may scramble # this list so", "# Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if UserOptions.output_format", ") : Suite.append( testcase ) else: CaseMessage( \"Warning - No pair possible: \",", "Here is our lazy deletion of obligations; we # clip from the end", "word Token = \"<PASSWORD>\" tokenStream = getToken() def parse(): global Token global NCol", "rights reserved. Redistribution and use in source and binary forms, with or without", "= set() ## Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ] #", "False return True # --------------------------------------------------------- def MakeTuple ( len ): newList = []", "theory of liability, whether in contract, strict liability, or tort (including negligence or", "least ## fill in some compatible value and move on? dbg_p(\"#DBG *** Trying", "yield EOF return commentPos = s.find(\"//\"); if commentPos >= 0 : s =", "len(candidates) < maxCandidates : ob = colObs[obindex] if not (ob in Outstanding or", "import into a spreadsheet or other CSV-consuming application. 
\"\"\" dbg(\"Print as CSV\") csv_writer", "--- reverse sense for IfCond in ValueIfs : val, slot, cond = IfCond", "used to endorse or promote products derived from this software without specific prior", "one (non-error, non-single) choice MultipleColumns = [ ] # Complement of SingleColumns --", "lists import csv ## for reading and writing test suites ## Constants (other", "return CategoryToken if tok == \"if\" : return IfToken if tok == \"prop\"", "CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal not in PropsSlots", ": print_(\"=== Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations :", "itempair is a pair (item, item), that is, ((slot, value), (slot, value)) #", "ObsList = [ ] # All obligations, but only one direction Outstanding =", "\", col , \" with \", testcase) return False # ------------------------------------------------------------ # Print", "conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot ] :", "know which will be handled ## by the application, and we assume special", "set a limit (maxCandidates) on number of candidates considered colObs = ObsByCol[col] candidates", "\")\") if tokenClass( Token ) != ValueToken : print_(\"Syntax error, expecting value, saw", "= IfCond for conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[", "value = value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value", "dialect=csv.excel ) schema_row = [ ] for slot in columns : schema_row.append( CategoriesList[slot]", "( word, tokenClass(word) ) ) yield word Token = \"<PASSWORD>\" tokenStream = getToken()", ") def completeCase( columnOrder, testcase ) : if len (columnOrder) == 0 :", "Note one (but not both) of these may coincide with # an existing", "that come from \"if\" clauses --- reverse sense for IfCond in ValueIfs :", "random.shuffle(columnOrder) value, slot, kind 
= single dbg(\"#DBG single obligation: \", slot, value, kind)", "## may miss other features, including other special cases) ## * Not consider", "only one direction Outstanding = set() # All obligations, but only one direction", "and ## * Not consider any test case with more than one ##", "choice MultipleColumns = [ ] # Complement of SingleColumns -- pairs are from", "print_(\"Syntax error, expecting value, saw \", Token ) return [ \"--bogus--\"] value =", "list of condition names ValueIfs = [ ] # List of (value, slot,", "[ ] ## List of value sets Singles = [] ## List of", "\"<CAT>\" ValueToken = \"<VAL>\" IfToken = \"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\"", "loop will generate test cases, and the inner loops try to fulfill #", "[] ## List of its properties for cond in valDesc[1:] : kind =", "= valOf(cond) if kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val", "eliminate those obligations, so we are creating ## a test suite to fill", "v2 in CategoriesValues[j] : j_item = (j, v2) obforward = (i_item, j_item) obbackward", "a test suite to fill in the remainder of the test ## obligations.", "ValueProps = { } # Map (slot,value) pair to list of condition names", "A case is a list (array) with n columns # # Representations: #", "so far? 
# def compatible( item, testcase ) : slot, val = item", ": Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass( Token )", "= ObsList.pop() s1, v1 = seedObligation[0] s2, v2 = seedObligation[1] testcase = MakeTuple(", "for reading and writing test suites ## Constants (other than tokens for parsing)", ") != ValueToken : print_(\"Syntax error, expecting value, saw \", Token ) return", "newList def CreateCase(): seedObligation = ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList)", "list of test cases # An item is a tuple, and an itempair", ") : if len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \", testcase)", "2006 --- Let's look at all the outstanding obligations # and choose the", ": dbg(\"write row \" , t ) csv_writer.writerow( t ) # ---------------- ##", "time by # always fixing these at the beginning of pairs generation, and", "which pairs of values have not # been covered: python genpairs.py --csv --initial-suite", "= six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return [(\"if\" , ifcond)] +", "category\") ## Error recovery to next category while tokenClass( Token ) != CategoryToken", "dbg(\"#DBG === Attempting tuple seeded with\", testcase) columnOrder = list(range( len(CategoriesList) ) )", "choice, i.e., for pairs # generation the value will be fixed. We can", "of conditions and the following disclaimer in the documentation and/or other materials provided", "License = \"\"\" (C) 2007,2017 University of Oregon and <NAME>. 
All rights reserved.", "error ## cases first, and ## * Not consider any test case with", "return IfToken if tok == \"prop\" : return PropToken if tok == \"except\"", "global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken : Token =", ") ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) ) : Suite.append( testcase", "{} # Per column, both directions SingleColumns = [ ] # Columns with", "if tok == \"prop\" : return PropToken if tok == \"except\" : return", "otherwise) arising in any way out of the use of this software, even", "return commentPos = s.find(\"//\"); if commentPos >= 0 : s = s[0:commentPos] for", "val, slot, cond = ExceptCond for conflict_slot in PropsSlots[ cond ] : for", "nor the names of its contributors may be used to endorse or promote", "code must retain the above copyright notice, this list of conditions and the", "tuple ): return tuple[0] def valOf( tuple ): return tuple[1] # --------------- Build", "Outstanding: if (len(ObsList) == 0): return seedObligation = ObsList.pop() s1, v1 = seedObligation[0]", "dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG * Attempting to complete\", testcase", "to next category\") ## Error recovery to next category while tokenClass( Token )", "the test case so far? 
# def compatible( item, testcase ) : slot,", "in CategoriesValues[ conflict_slot ] : if cond not in ValueProps[ (conflict_slot, cs_value) ]", "# ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) :", ": s1, v1 = ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\"", ": value = value + 1 if testcase[s2] != v2 : for ccol", "== \"if\" : return IfToken if tok == \"prop\" : return PropToken if", "couldn't score any more obligations, can we at least ## fill in some", "columnOrder, \" in \", testcase) # How shall we fill this DontCare with", "columns : value = t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\" )", "def initial_suite_clear( initial_suite ) : matches = False reader = csv.reader( open(initial_suite, \"r\"),", "--------------------------------------------------------- def MakeTuple ( len ): newList = [] for i in range(0,len):", "columns, descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \"", "if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value + 1 if testcase[s2] !=", "from \"except\" clauses for ExceptCond in ValueExcepts : val, slot, cond = ExceptCond", "conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"--- Creating obligations list ---\") keys", "one # def getToken() : while 1: s = sys.stdin.readline() if not s:", "PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val, slotNum, condVal ) ) elif", "marking val as a possible value of the property ## until we know", "build Suite = [ ] ## List of test cases ## Instrumentation INSTR_N_Comparisons", "DontCare and testcase[slot] != val) : return False for tslot in range(len(testcase)) :", "to have no effect def initial_suite_clear( initial_suite ) : matches = False reader", ") # Each item in the pair is a <slot,value> 
or <name,value> pair", "handling import random ## for shuffling lists import csv ## for reading and", "from optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot", "# format: python genpairs.py < foo.cp # To read a partial suite of", "default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial test suites. (Useful only with", "def valOf( tuple ): return tuple[1] # --------------- Build initial data structures ----", "tuple[1] # --------------- Build initial data structures ---- # Single columns are those", "): newList = [] for i in range(0,len): newList.append(DontCare) return newList def CreateCase():", "for vec in reader: if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for", "= Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category, \"", "--initial)\"\"\") (UserOptions, UserArgs) = optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling", "len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2 for slot in SingleColumns :", "way out of the use of this software, even if advised of the", ", t ) csv_writer.writerow( t ) # ---------------- ## Read an initial test", "we couldn't score any more obligations, can we at least ## fill in", "for v2 in CategoriesValues[j] : j_item = (j, v2) obforward = (i_item, j_item)", "initial test suite is not ## a suite of special and error cases.", "in\", initial_suite) print_(\" Column \", i, \"'\" + col + \"'\", \"not in", "PropsSlots = { } # For each property name, set of slots with", "they are useful in several places # def identifySingles() : for slot in", ") # --------------------------------------------------------- # # Is a given (slot,value) pair compatible with the", "special cases) ## * Not consider any pairs as being satisfied by a", "contributors be liable for any 
direct, indirect, incidental, special, exemplary, or consequential damages", "a \"single\" or \"error\" choice, i.e., for pairs # generation the value will", "cand in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1 ]", "range ( i+1, len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j]) if ob", "Token = \"<PASSWORD>\" tokenStream = getToken() def parse(): global Token global NCol Token", "] # Columns with just one (non-error, non-single) choice MultipleColumns = [ ]", "ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value", "msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug messages def dbg_p(*msg): if DBGp:", "a partial suite of test cases (tests.txt) in CSV format, # plus a", "seedObligation = ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList) == 0): return", "def completeCase( columnOrder, testcase ) : if len (columnOrder) == 0 : dbg_p(\"#DBG:", "return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying columns \", columnOrder, \" in \",", "# # Representations: # A test case is represented as a list, indexed", "for file handling import random ## for shuffling lists import csv ## for", "Oregon and <NAME>. All rights reserved. Redistribution and use in source and binary", "s2, v2 ): return ((s1, v1), (s2, v2)) def reversePair( pair ): return", ": Excludes.add( makePair( slot, val, conflict_slot, cs_value)) # Excludes that come from \"if\"", "] colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob,", "csv format). Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\",", "optparse import OptionParser optparser = OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of", "are disclaimed. 
In no event shall the copyright owner or contributors be liable", "(true) or off (false) DBGp = False ## Performance debugging, December 2006 maxCandidates", "case) # An exclusion is a pair (the two items must not occur", "columns ) else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title", "def CreateCase(): seedObligation = ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList) ==", "in Singles: CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder", "= v2 for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting", "==len(CategoriesList), set after parsing ## Temporary, for building excludes PropsSlots = { }", "list of obligations, some of which may # already have been fulfilled (deletion", "condVal) ) elif kind == \"error\" or kind == \"single\" : Singles.append( (val,", "exclusion is a pair (the two items must not occur together in any", "use of this software, even if advised of the possibility of such damage.", "data, or profits; or business interruption) however caused and on any theory of", "-v -p < foo.cp # To read the same as above, and then", "lazily. # # Exclude is a dictionary mapping items to lists of item.", "== EOF : return EOFToken if tok.endswith(\":\") : return CategoryToken if tok ==", "makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue +", "MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]] =", "a single ## or error case. 
## For now, we just assume that", "\", col, \" (already filled in)\") return completeCase( columnOrder[1:], testcase ) dbg(\"#DBG ***Trying", "(and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\",", "\"csv\", help = \"\"\"Output format is comma-separated-values (suitable as input to Excel and", "value + conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token )", "!= val) : return False for tslot in range(len(testcase)) : if ((slot, val),", "nslots = len(keys) for i in range(nslots): ObsByCol[i] = [] for i in", "value will be fixed. We can save some time by # always fixing", "All obligations, but only one direction ObsByCol = {} # Per column, both", "random ## for shuffling lists import csv ## for reading and writing test", "MultipleColumns : for v1 in CategoriesValues[i] : i_item = (i, v1) for j", "Trying any value, regardless of obligation\") for val in CategoriesValues[ col ] :", "(s2, v2)) def reversePair( pair ): return ( pair[1], pair[0] ) # Each", "debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 = ob[1]", "(in csv format). 
Often used together with -p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False,", "creating ## a test suite to fill in the remainder of the test", "initial suite \", initial_suite) print_(\"*** Expecting columns \", in_schema , \" but saw", "provided by the copyright holders and contributors \"as is\" and any express or", "by the copyright holders and contributors \"as is\" and any express or implied", "ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \"", "print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) > 0 : CreateCase() if UserOptions.varying", "obligations list. The other lists are # cleared lazily, when we bring up", "# import sys ## for file handling import random ## for shuffling lists", "columns only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles : Suite", "\") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\",", "from # the outstanding obligations list. The other lists are # cleared lazily,", "outstanding obligations # and choose the one with highest score. 
This is fairly", "List of (slot,value,kind) where kind is \"single\" or \"error\" Excludes = set() ##", "= [ str(x) for x in msg ] msg_string = \" \".join(parts) Log.debug(msg_string)", "better solutions, smaller = faster ## Platform compatibility # ---------------------------------------- import six #", "= columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \", col, \"", "## List of category names (in order given) ## The CategoriesList can also", "application, and we assume special case processing ## may miss other features, including", "one (but not both) of these may coincide with # an existing element.", "if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) #", ": val = parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append( val )", "smaller = faster ## Platform compatibility # ---------------------------------------- import six # Python 2", "case so far? # def compatible( item, testcase ) : slot, val =", "Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j", "Each item in the pair is a <slot,value> or <name,value> pair def slotOf(", "[ ] Token = tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1] Token", "coverage\" ) if UserOptions.singles : Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)), \"Single", "candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1 ] testcase[ s1", "only\" ) else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles : Suite =", "tokenClass( Token ) == ErrorToken : Token = six.next(tokenStream) return [(\"error\", None )]", "(tslot, testcase[tslot])) in Excludes: return False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False", "def print_required_pairs( ) : for ob in Outstanding : s1, v1 = ob[0]", "triples ## What we build Suite = [ ] ## List of test", 
"PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title ) def PrintAsText( columns, descriptive_title ):", "] = old_v1 testcase[ s2 ] = old_v2 ## If we couldn't score", "itself ## Postpone marking val as a possible value of the property ##", "((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value + 1 if testcase[s2] != v2", "python genpairs.py < foo.cp # To read a partial suite of test cases", "# An itempair is a pair (item, item), that is, ((slot, value), (slot,", "testcase ) : if len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \",", "of Oregon and <NAME>. All rights reserved. Redistribution and use in source and", "pairs of values have not # been covered: python genpairs.py --csv --initial-suite tests.txt", "holders and contributors \"as is\" and any express or implied warranties, including, but", "University of Oregon and <NAME> # All rights reserved. # License = \"\"\"", "# this list so we don't have an unfortunate ordering. # Outstanding is", "debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\",", "pair ): return ( pair[1], pair[0] ) # Each item in the pair", "(after initialization above) ## ------------------------------------------------------------ # -- Respond to special diagnostic options --", "(name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ##", "'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not", "UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main", "will generate test cases, and the inner loops try to fulfill # as", "# we 
can save space in output by suppressing them. # (Note they", "Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase ) #", "0 : dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG * Attempting to", "missing pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" # # An", "distribution. * Neither the name of the University of Oregon nor the names", "slotOf( tuple ): return tuple[0] def nameOf( tuple ): return tuple[0] def valOf(", "cleared lazily, when we bring up an obligation. # def clearObligations(testcase) : testCaseValue", "------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a warning or error", ") elif kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal) ) elif kind", "v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------", "s1, v1, s2, v2 ): return ((s1, v1), (s2, v2)) def reversePair( pair", "remainder of the test ## obligations. ## ## NOTE: Currently considering only pair", "than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print", "of the test ## obligations. ## ## NOTE: Currently considering only pair obligations,", "which may # already have been fulfilled (deletion is lazy). We may scramble", "ExceptCond for conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot", "notice, this list of conditions and the following disclaimer. 
* Redistributions in binary", "# For each property name, set of slots with it CategoriesProps = {", ": if UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns, descriptive_title", "not in Outstanding: if (len(ObsList) == 0): return seedObligation = ObsList.pop() s1, v1", "\"Warning - No pair possible: \", testcase ) def CreateSingles(): for single in", "True else: dbg_p(\"#DBG *** Rolling back \", s1, s2) # Restore previous values", "we assume special case processing ## may miss other features, including other special", "UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList", "tok == \"single\" : return SingleToken if tok == \"error\" : return ErrorToken", "return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more conditions\") return [ ]", "depend on excludes, so call makeExcludes before # calling makeObligations # def makeExcludes()", "= makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue", "parsing) DontCare = \"_\" ## Configuration parameters DBG = False ## Debugging mode,", "value + 1 if testcase[s2] != v2 : for ccol in range( len(testcase)", "SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass( tok ) : if tok", "( completeCase( columnOrder, testcase ) ) : Suite.append( testcase ) clearObligations( testcase )", "warning or error message concerning a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg),", "---\") # When we complete a test case, we remove obligations from #", "condVal ) ) elif kind == \"except\" : ValueExcepts.append( (val, slotNum, condVal) )", "MakeTuple( NCol ) s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t", "dbg_p(\"#DBG ** Failing to fill column \", col , \" with \", testcase)", "No non-singular value choices for \", CategoriesList[slot], \"; Pairs generation will fail.\") elif", "value , end=\"\") 
print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print", "= False ## Performance debugging, December 2006 maxCandidates = 50 ## Bigger =", "listed as a \"single\" or \"error\" choice, i.e., for pairs # generation the", "Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning - No pair possible:", "-1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format mismatch", "ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def makeObligations()", "\"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output format is comma-separated-values (suitable", "and 3 compatibility from six import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s',", "at single and error ## cases first, and ## * Not consider any", "print_(\"---------------------------\") # -- Main processing: Parse the script, execute, print -- parse() identifySingles()", "CategoriesList can also be considered the test case schema CategoriesValues = [ ]", "# def identifySingles() : for slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0", "return [ ] Token = tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1]", "# because they are useful in several places # def identifySingles() : for", "== \"except\" : return ExceptToken if tok == \"single\" : return SingleToken if", "CategoriesValues[j] : j_item = (j, v2) obforward = (i_item, j_item) obbackward = (j_item,", ": for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value", "v1) for j in range(i+1,nslots) : ## if j in SingleColumns: continue ##", "by the application, and we assume special case processing ## may miss other", "or implied warranties, including, but not limited to, the implied warranties of merchantability", "0 : print_(\"Warning: No non-singular value 
choices for \", CategoriesList[slot], \"; Pairs generation", "v1 testcase[s2] = v2 for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG", "list (array) with n columns # # Representations: # A test case is", "in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular value choices", "on G4 laptop), so now we # set a limit (maxCandidates) on number", "names of its contributors may be used to endorse or promote products derived", "ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass( tok )", "## * Not consider any test case with more than one ## single", "print the test vector in human-readable # format: python genpairs.py < foo.cp #", "of quick access: # ObsList is a list of obligations, some of which", "of substitute goods or services; loss of use, data, or profits; or business", "and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward)", "dbg_p(\"#DBG *** Rolling back \", s1, s2) # Restore previous values testcase[ s1", "print_(\"=== Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while", "# def makeExcludes() : # Excludes that come from \"except\" clauses for ExceptCond", "testcase[tslot]),(slot,val)) in Excludes: return False return True # --------------------------------------------------------- def MakeTuple ( len", "Suite : for slot in columns : value = t[slot] print_(\"%15s\" % value", "2007,2017 University of Oregon and <NAME>. All rights reserved. Redistribution and use in", "spreadsheets, genpairs with the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\", \"--varying\", \"--varying-columns-only\",", "9 minutes wall time on G4 laptop), so now we # set a", "following disclaimer in the documentation and/or other materials provided with the distribution. 
*", "MakeTuple ( len ): newList = [] for i in range(0,len): newList.append(DontCare) return", "Token ) != CategoryToken : print_(\"Syntax error on \", Token, \" looking for", "= DontCare dbg_p(\"#DBG ** Failing to fill column \", col , \" with", "kind == \"error\" or kind == \"single\" : Singles.append( (val, slotNum, kind) )", "vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format mismatch with initial suite \",", "end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation to", "print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the", "i+1, len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding:", "\"'\" + col + \"'\", \"not in specification\") in_schema_map.append(-1) for vec in reader:", "produce test cases covering 'single' or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default", "testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score the # Note one", "conditions) return value + conditions def parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass(", "results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if UserOptions.output_format == \"csv\"", "of SingleColumns -- pairs are from these NCol = 0 # ==len(CategoriesList), set", "case. 
# # Data structures: # We will record obligations in three different", "(non-single columns) as well, # because they are useful in several places #", ", end=\"\") print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors", "[ ] ## List of category names (in order given) ## The CategoriesList", "def parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass( Token", "Token ) == ErrorToken : Token = six.next(tokenStream) return [(\"error\", None )] +", "implied warranties, including, but not limited to, the implied warranties of merchantability and", "## Bigger = better solutions, smaller = faster ## Platform compatibility # ----------------------------------------", "set() PropsSlots[condVal].add(slotNum) elif kind == \"if\" : ValueIfs.append( (val, slotNum, condVal ) )", "Suite.append( testcase ) else: CaseMessage( \"Warning - No pair possible: \", testcase )", "CategoryToken : print_(\"Syntax error on \", Token, \" looking for 'category:'\") print_(\"Skipping to", "initial_suite) print_(\"*** Expecting columns \", in_schema , \" but saw \", vec) #", "::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category ) vlist = [ ]", "v1), (s2, v2)) def reversePair( pair ): return ( pair[1], pair[0] ) #", "tokens for parsing) DontCare = \"_\" ## Configuration parameters DBG = False ##", "excludes.) 
# # We'll identify the multiples (non-single columns) as well, # because", "+ parseConditions() dbg(\"#DBG No more conditions\") return [ ] # -------------- The form", "col ] = val if completeCase( columnOrder[1:], testcase ): return True else: testcase[", "are those in which all but one value is # listed as a", "now, we just assume that the initial test suite is not ## a", "SingleToken if tok == \"error\" : return ErrorToken return ValueToken # Generator to", "def slotOf( tuple ): return tuple[0] def nameOf( tuple ): return tuple[0] def", "in contract, strict liability, or tort (including negligence or otherwise) arising in any", ") parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [ ] while", "file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert obligation to vector", "tokenClass( Token ) == ValueToken : val = parseValue() dbg(\"#DBG (parsed value: \",", "in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList", "G4 laptop), so now we # set a limit (maxCandidates) on number of", "without modification, are permitted provided that the following conditions are met: * Redistributions", "] for slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t in", "v2 = seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] =", "valDesc in values : val = valDesc[0] ## The name of the value", "on number of candidates considered colObs = ObsByCol[col] candidates = [ ] obindex", "be schema in_schema = reader.next() in_schema_map = [ ] for i in range(len(in_schema)):", "above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions", "UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse", "All rights reserved. Redistribution and use in source and binary forms, with or", "considered the test case schema CategoriesValues = [ ] ## List of value", "not occur together in any case) # A case is a list (array)", "# An item is a pair (slot number, value) # An itempair is", "[ ] if tokenClass( Token ) != CategoryToken : print_(\"Syntax error on \",", "j in range ( i+1, len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j])", "not limited to, the implied warranties of merchantability and fitness for a particular", "item if ( testcase[ slot ] != DontCare and testcase[slot] != val) :", "( pair[1], pair[0] ) # Each item in the pair is a <slot,value>", "<NAME> # All rights reserved. # License = \"\"\" (C) 2007,2017 University of", "line should be schema in_schema = reader.next() in_schema_map = [ ] for i", "= item if ( testcase[ slot ] != DontCare and testcase[slot] != val)", "case with more than one ## single or error value (we don't know", "len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in range(len(vec)) : if in_schema_map[i] !=", "shall the copyright owner or contributors be liable for any direct, indirect, incidental,", "] ## List of value sets Singles = [] ## List of (slot,value,kind)", "Expecting columns \", in_schema , \" but saw \", vec) # ---------------- ##", "debug messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User arguments from", "[] for i in range(0,len): newList.append(DontCare) return newList def CreateCase(): seedObligation = ObsList.pop()", "six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more conditions\") return [", "some of which may # already have been fulfilled (deletion is lazy). 
We", "and print the test vector in human-readable # format: python genpairs.py < foo.cp", "\"'\", \"not in specification\") in_schema_map.append(-1) for vec in reader: if len(vec) == len(in_schema)", "else: CaseMessage( \"Warning - No pair possible: \", testcase ) def completeCase( columnOrder,", "don't know which will be handled ## by the application, and we assume", "as a \"single\" or \"error\" choice, i.e., for pairs # generation the value", "report which pairs of values have not # been covered: python genpairs.py --csv", "False # ------------------------------------------------------------ # Print Warnings (to stderr unless otherwise specified) # ------------------------------------------------------------", "a given (slot,value) pair compatible with the test case so far? # def", "reverse sense for IfCond in ValueIfs : val, slot, cond = IfCond for", "compatible((col,val), testcase) : testcase[ col ] = val if completeCase( columnOrder[1:], testcase ):", "specific prior written permission. This software is provided by the copyright holders and", "file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) :", "= seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2", "for slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded", "else: CaseMessage( \"Warning - No pair possible: \", testcase ) def CreateSingles(): for", "Bigger = better solutions, smaller = faster ## Platform compatibility # ---------------------------------------- import", "range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value + 1", "implied warranties of merchantability and fitness for a particular purpose are disclaimed. 
In", "val = item if ( testcase[ slot ] != DontCare and testcase[slot] !=", "value + 1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value + 1", "## def print_required_pairs( ) : for ob in Outstanding : s1, v1 =", "(category) # A test suite is a list of test cases # An", "documentation and/or other materials provided with the distribution. * Neither the name of", "slot, condition) triples ValueExcepts = [ ] # List of (value, slot, condition)", "not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value))", "% ( word, tokenClass(word) ) ) yield word Token = \"<PASSWORD>\" tokenStream =", "is a singleton singleton = False ValueProps[ (slotNum, val) ] = [] ##", "CategoriesList[slot], \"; Pairs generation will fail.\") elif len(CategoriesValues[slot]) == 1 : SingleColumns.append(slot) else:", "obligations, some of which may # already have been fulfilled (deletion is lazy).", "condition names ValueIfs = [ ] # List of (value, slot, condition) triples", "six # Python 2 and 3 compatibility from six import print_ ## Logging", "may coincide with # an existing element. We'll only consider *added* value, #", "if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\",", "[ ] # -------------- The form of a pair (obligation or exclusion) -----", "# Columns with just one (non-error, non-single) choice MultipleColumns = [ ] #", ") yield word Token = \"<PASSWORD>\" tokenStream = getToken() def parse(): global Token", "generation the value will be fixed. 
We can save some time by #", "= better solutions, smaller = faster ## Platform compatibility # ---------------------------------------- import six", "] : for cs_value in CategoriesValues[ conflict_slot ] : if cond not in", "No pair possible: \", testcase ) def CreateSingles(): for single in Singles: CreateSingle(single)", "ObsByCol[col] candidates = [ ] obindex = 0 while obindex < len(colObs) and", "## cases first, and ## * Not consider any test case with more", "for v1 in CategoriesValues[i] : i_item = (i, v1) for j in range(i+1,nslots)", "in source and binary forms, with or without modification, are permitted provided that", "len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token == EOF : return", "makePair( slot, val, conflict_slot, cs_value)) def makeObligations() : if DBG: print_(\"--- Creating obligations", ") testcase[s1] = v1 testcase[s2] = v2 for slot in SingleColumns : testcase[slot]", "print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations)", "singleton: vlist.append( val ) parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values =", "= optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG)", "= six.next(tokenStream) return [(\"error\", None )] + parseConditions() if tokenClass( Token ) ==", "also be considered the test case schema CategoriesValues = [ ] ## List", "## Debugging mode, on (true) or off (false) DBGp = False ## Performance", "dbg(\"*** Value \", testCaseValue, testcase ) # --------------------------------------------------------- # # Is a given", "completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) > 0 :", ": Token = six.next(tokenStream) 
condname = Token Token = six.next(tokenStream) return [(\"except\" ,", "testcase[ s2 ] testcase[ s2 ] = v2 if completeCase( columnOrder[1:] , testcase", "t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable(", "if not singleton: vlist.append( val ) parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\")", "= six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"prop\" , condname)] +", "== 0 : dbg_p(\"#DBG: *** Success: \", testcase) return True dbg_p(\"#DBG * Attempting", "may # already have been fulfilled (deletion is lazy). We may scramble #", "# ---------------------------------------- import six # Python 2 and 3 compatibility from six import", "word, tokenClass(word) ) ) yield word Token = \"<PASSWORD>\" tokenStream = getToken() def", "CategoriesProps[ category ] = [ ] for valDesc in values : val =", "writing test suites ## Constants (other than tokens for parsing) DontCare = \"_\"", "\"r\"), csv_dialect) ## Working yet? (No.) ## First line should be schema in_schema", "tests.txt -o -v -p < foo.cp # To read the same as above,", "time on G4 laptop), so now we # set a limit (maxCandidates) on", "varying column -- for v2 in CategoriesValues[j] : j_item = (j, v2) obforward", "and fitness for a particular purpose are disclaimed. In no event shall the", "we complete a test case, we remove obligations from # the outstanding obligations", "in CategoriesValues[ col ] : if compatible((col,val), testcase) : testcase[ col ] =", "EOF def tokenClass( tok ) : if tok == EOF : return EOFToken", "slot, cond = ExceptCond for conflict_slot in PropsSlots[ cond ] : for cs_value", "global Token global NCol Token = six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec():", "a dictionary mapping items to lists of item. 
# import sys ## for", "PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot ] : if cond", "CategoryToken if tok == \"if\" : return IfToken if tok == \"prop\" :", "== IfToken : Token = six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return", ") dbg(\"#DBG ***Trying columns \", columnOrder, \" in \", testcase) # How shall", "in CSV format, # plus a test specification, and report which pairs of", "non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering", "t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\" ) print_( \"\" ) def", "satisfied by a single ## or error case. ## For now, we just", "## or error case. ## For now, we just assume that the initial", "Redistributions of source code must retain the above copyright notice, this list of", "by column, also updated lazily. # # Exclude is a dictionary mapping items", "advised of the possibility of such damage. 
\"\"\" usage = \"\"\"Usage: # To", "some time by # always fixing these at the beginning of pairs generation,", "seedObligation[1] testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2 for", "parseConditions() if tokenClass( Token ) == PropToken : Token = six.next(tokenStream) condname =", "a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col", "] : if cond not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair(", "testcase[s1] != v1 : for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in", "CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column", "\", i, \"'\" + col + \"'\", \"not in specification\") in_schema_map.append(-1) for vec", "-- pairs are from these NCol = 0 # ==len(CategoriesList), set after parsing", "csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ] for slot in columns :", "): for j in range ( i+1, len(testcase) ): ob = makePair(i, testcase[i],", "dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos = s.find(\"//\"); if commentPos >= 0", "in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value = value +", "\":\", len(Suite), \" test vectors\") print_(\"\") for slot in columns : parm =", "Redistribution and use in source and binary forms, with or without modification, are", "comma-separated values, for import into a spreadsheet or other CSV-consuming application. \"\"\" dbg(\"Print", "vec) # ---------------- ## Print the set of outstanding obligations. 
Typical use is", "is\" and any express or implied warranties, including, but not limited to, the", "= testCaseValue + 1 dbg(\"*** Value \", testCaseValue, testcase ) # --------------------------------------------------------- #", "regardless of obligation\") for val in CategoriesValues[ col ] : if compatible((col,val), testcase)", "# Generator to produce tokens, one by one # def getToken() : while", "in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \", i, \"'\" +", "--csv --initial-suite tests.txt -o -v -p < foo.cp # To read the same", "SingleColumns: continue ## ## --- short cut doesn't work if only one varying", "such damage. \"\"\" usage = \"\"\"Usage: # To read a specification (foo.cp) and", "(c) 2007 University of Oregon and <NAME> # All rights reserved. # License", "# To read a partial suite of test cases (tests.txt) in CSV format,", "represented as a list, indexed by column (category) # A test suite is", "= [ ] obindex = 0 while obindex < len(colObs) and len(candidates) <", "possibility of such damage. 
\"\"\" usage = \"\"\"Usage: # To read a specification", "# -- Respond to special diagnostic options -- if UserOptions.license: print_(License) exit(0) if", "is a list (array) with n columns # # Representations: # A test", "[ ] ## List of test cases ## Instrumentation INSTR_N_Comparisons = 0 #", "if Token == EOF : return [ ] if tokenClass( Token ) !=", "for conflict_slot in PropsSlots[ cond ] : for cs_value in CategoriesValues[ conflict_slot ]", "testcase[ col ] = DontCare dbg_p(\"#DBG ** Failing to fill column \", col", "UserOptions.varying : PrintTable( MultipleColumns, \"Pairwise coverage, varying columns only\" ) else: PrintTable( range(len(CategoriesList)),", "2006 maxCandidates = 50 ## Bigger = better solutions, smaller = faster ##", "range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular value choices for", "not singleton: vlist.append( val ) parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values", "colObs.pop() else: if compatible(ob[0], testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase", "] testcase[ s2 ] = v2 if completeCase( columnOrder[1:] , testcase ): return", "not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"---", "given) ## The CategoriesList can also be considered the test case schema CategoriesValues", "] = val if completeCase( columnOrder[1:], testcase ): return True else: testcase[ col", "valDesc[0] ## The name of the value itself ## Postpone marking val as", "= sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\") yield EOF return commentPos =", "else: PrintTable( range(len(CategoriesList)), \"Pairwise coverage\" ) if UserOptions.singles : Suite = [ ]", "in range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), 
end=\"\",", "occur together in some case) # An exclusion is a pair (the two", "return values def parseValue(): global Token dbg(\"#DBG (parseValue, looking at \", Token, \")\")", "## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ # -- Respond", ") print_( \"\" ) def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for", "DontCare = \"_\" ## Configuration parameters DBG = False ## Debugging mode, on", "## Primary data structures CategoriesList = [ ] ## List of category names", "ObsByCol is a dictionary obligations by column, also updated lazily. # # Exclude", "# An obligation is a pair (the two items must occur together in", "different forms of quick access: # ObsList is a list of obligations, some", ": print_(\"Syntax error on \", Token, \" looking for 'category:'\") print_(\"Skipping to next", "file using a simple LL parser ---- # Consts for token classification EOF", "complete, \", len(ObsList), \" obligations ---\") # When we complete a test case,", "but one value is # listed as a \"single\" or \"error\" choice, i.e.,", ": \"\"\"Print a warning or error message concerning a particular partially-defined test vector\"\"\"", "an itempair is a tuple # # Like AETG and several other covering", ") == EOF : print_(\"Discarding rest of file\") return [ ] Token =", "an initial test suite (or several), and ## eliminate those obligations, so we", "pair possible: \", testcase ) def completeCase( columnOrder, testcase ) : if len", "returns\", value + conditions) return value + conditions def parseConditions(): global Token dbg(\"#DBG", "] for valDesc in values : val = valDesc[0] ## The name of", "makeObligations # def makeExcludes() : # Excludes that come from \"except\" clauses for", "Generate an all-pairs covering test suite # # (c) 2007 University of Oregon", "* Neither the name of the University of Oregon nor the names of", 
"CategoryToken : if tokenClass( Token ) == EOF : print_(\"Discarding rest of file\")", "to next category while tokenClass( Token ) != CategoryToken : if tokenClass( Token", "testcase ): return True else: dbg_p(\"#DBG *** Rolling back \", s1, s2) #", "ObToVector( ob ) : \"\"\"Convert obligation to vector for debugging messages\"\"\" t =", ") : if UserOptions.output_format == \"csv\" : PrintAsCSV( columns ) else: PrintAsText( columns,", "merchantability and fitness for a particular purpose are disclaimed. In no event shall", ") singleton = True else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if not", "Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary", "SingleToken : Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass( Token", "return [(\"if\" , ifcond)] + parseConditions() if tokenClass( Token ) == PropToken :", "those obligations, so we are creating ## a test suite to fill in", "cases first, and ## * Not consider any test case with more than", "for i in range(nslots): ObsByCol[i] = [] for i in MultipleColumns : for", "= v2 if completeCase( columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG ***", "list of conditions and the following disclaimer in the documentation and/or other materials", "in reader: if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i in", "for tslot in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in Excludes: return", "value = 1 ## For at least meeting one obligation ((s1, v1), (s2,", "testcase[ s1 ] = old_v1 testcase[ s2 ] = old_v2 ## If we", "(No.) 
## First line should be schema in_schema = reader.next() in_schema_map = [", "parseConditions() if tokenClass( Token ) == SingleToken : Token = six.next(tokenStream) return [(\"single\",", "action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce test cases covering 'single' or", "test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector)) :", "Token[0:-1] Token = six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category, \" ::=", "for i in range( len(testcase) ): for j in range ( i+1, len(testcase)", "default = [], dest=\"initial_suite\", help=\"\"\"Read initial test suite (in csv format). Often used", "PrintTable( columns, descriptive_title ) : if UserOptions.output_format == \"csv\" : PrintAsCSV( columns )", "CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ] for valDesc in values : val", "tok == \"prop\" : return PropToken if tok == \"except\" : return ExceptToken", "= six.next(tokenStream) return [(\"single\", None)] + parseConditions() if tokenClass( Token ) == IfToken", "in_schema_map.append(-1) for vec in reader: if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList))", "\", in_schema , \" but saw \", vec) # ---------------- ## Print the", "ob, testcase ) # Score the # Note one (but not both) of", "] # All obligations, but only one direction Outstanding = set() # All", "v1), (s2, v2)) = ob if testcase[s1] != v1 : for ccol in", "((s1, v1), (s2, v2)) def reversePair( pair ): return ( pair[1], pair[0] )", "= Token Token = six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if tokenClass(", "[ ] for slot in columns : schema_row.append( CategoriesList[slot] ) csv_writer.writerow(schema_row) for t", "OptionParser(usage=usage) optparser.set_defaults(output_format=\"plain\") optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\")", "def 
PrintTable( columns, descriptive_title ) : if UserOptions.output_format == \"csv\" : PrintAsCSV( columns", "we just assume that the initial test suite is not ## a suite", "trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"*** Warning, format mismatch with initial", "## Print the set of outstanding obligations. Typical use is when ## we", "in excludes.) # # We'll identify the multiples (non-single columns) as well, #", "columnOrder, testcase ) ) : Suite.append( testcase ) clearObligations( testcase ) else: CaseMessage(", "in values : val = valDesc[0] ## The name of the value itself", "any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited", "List of category names (in order given) ## The CategoriesList can also be", "these at the beginning of pairs generation, and # we can save space", ": if cond not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot,", "in any case) # A case is a list (array) with n columns", "fill in the remainder of the test ## obligations. 
## ## NOTE: Currently", "concerning a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for", "in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 = testcase[ s1 ] testcase[", "% (name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after initialization above)", "{ } # For each category, all props on any values ValueProps =", "= t[slot] print_(\"%15s\" % value , end=\"\") print_( \"\" ) print_( \"\" )", "for building excludes PropsSlots = { } # For each property name, set", "dbg_p(\"#DBG * Attempting to complete\", testcase ) col = columnOrder[0] if testcase[col] !=", "dbg(\"write row \" , t ) csv_writer.writerow( t ) # ---------------- ## Read", "that is, ((slot, value), (slot, value)) # An obligation is a pair (the", "in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in Excludes: return False if", "the application, and we assume special case processing ## may miss other features,", ") == ExceptToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream)", "(item, item), that is, ((slot, value), (slot, value)) # An obligation is a", "\"if\" clauses --- reverse sense for IfCond in ValueIfs : val, slot, cond", "List of (value, slot, condition) triples ValueExcepts = [ ] # List of", "of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce", "a tuple, and an itempair is a tuple # # Like AETG and", "DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector(", "condVal not in PropsSlots : PropsSlots[condVal] = set() PropsSlots[condVal].add(slotNum) elif kind == \"if\"", "] ## List of test cases ## Instrumentation INSTR_N_Comparisons = 0 # ----------", "in some case) # An exclusion is a pair (the two items must", "any more obligations, can we at least ## fill in some compatible value", "# When we complete a test case, we remove obligations from # the", "def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a warning or error message", "then produce a test suite that # covers the missing pairs: python genpairs.py", "testcase ) def CreateSingles(): for single in Singles: CreateSingle(single) def CreateSingle( single ):", "be considered the test case schema CategoriesValues = [ ] ## List of", "0 while obindex < len(colObs) and len(candidates) < maxCandidates : ob = colObs[obindex]", ") == PropToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream)", "disclaimer in the documentation and/or other materials provided with the distribution. * Neither", "error, expecting value, saw \", Token ) return [ \"--bogus--\"] value = [", "parseConditions(): global Token dbg(\"#DBG (parseConditions)\") if tokenClass( Token ) == ErrorToken : Token", "not singletons. We should look at single and error ## cases first, and", "see what is missing in an initial test suite. 
## def print_required_pairs( )", "What we build Suite = [ ] ## List of test cases ##", "testcase ) clearObligations( testcase ) else: CaseMessage( \"Warning - No pair possible: \",", "initial_suite ) : matches = False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ##", "test case schema CategoriesValues = [ ] ## List of value sets Singles", "= \"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken =", "that come from \"except\" clauses for ExceptCond in ValueExcepts : val, slot, cond", "slotNum, condVal) ) elif kind == \"error\" or kind == \"single\" : Singles.append(", "PROGRAM (after initialization above) ## ------------------------------------------------------------ # -- Respond to special diagnostic options", "range( len(testcase) ): for j in range ( i+1, len(testcase) ): ob =", "ifcond)] + parseConditions() if tokenClass( Token ) == PropToken : Token = six.next(tokenStream)", "(10^20 takes about 9 minutes wall time on G4 laptop), so now we", "IfToken : Token = six.next(tokenStream) ifcond = Token Token = six.next(tokenStream) return [(\"if\"", "together in some case) # An exclusion is a pair (the two items", "## NOTE: Currently considering only pair obligations, ## not singletons. We should look", "pairs # generation the value will be fixed. We can save some time", "exemplary, or consequential damages (including, but not limited to, procurement of substitute goods", "the name of the University of Oregon nor the names of its contributors", "pairs not covered by initial test suites. (Useful only with --initial)\"\"\") (UserOptions, UserArgs)", "columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \", col, \" (already", "for pairs # generation the value will be fixed. 
We can save some", "at \", Token, \")\") if tokenClass( Token ) != ValueToken : print_(\"Syntax error,", "of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ] # All obligations, but only", "value if completeCase( columnOrder, testcase ) : Suite.append( testcase ) else: CaseMessage( \"Warning", "can we at least ## fill in some compatible value and move on?", "** Failing to fill column \", col , \" with \", testcase) return", "liability, or tort (including negligence or otherwise) arising in any way out of", "disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this", "test obligations as possible with each test case. # # Data structures: #", "= \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass( tok ) :", "Parse the script, execute, print -- parse() identifySingles() makeExcludes() makeObligations() for suite in", "] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite :", "when ## we are trying to see what is missing in an initial", "lazy deletion of obligations; we # clip from the end of the list", "len(testcase) ): for j in range ( i+1, len(testcase) ): ob = makePair(i,", "if compatible((col,val), testcase) : testcase[ col ] = val if completeCase( columnOrder[1:], testcase", "tslot in range(len(testcase)) : if ((slot, val), (tslot, testcase[tslot])) in Excludes: return False", "CategoriesValues[ conflict_slot ] : if cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add(", "set() # All obligations, but only one direction ObsByCol = {} # Per", "= (j, v2) obforward = (i_item, j_item) obbackward = (j_item, i_item) if obforward", "--------------- Build initial data structures ---- # Single columns are those in which", "makePair( s1, v1, s2, v2 ): return ((s1, v1), (s2, v2)) def reversePair(", "(slot, value)) # An obligation is a pair (the two items must occur", "and error cases. 
## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect", "= six.next(tokenStream) values = parseValues() dbg(\"#DBG Parsed: \", category, \" ::= \", values)", "we fill this DontCare with something useful? # Let's try for an outstanding", "in Outstanding): # Here is our lazy deletion of obligations; we # clip", "suite to fill in the remainder of the test ## obligations. ## ##", "colObs[obindex] if not (ob in Outstanding or reversePair(ob) in Outstanding): # Here is", "conflict_slot ] : if cond not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add(", "slot in SingleColumns : testcase[slot] = CategoriesValues[slot][0] dbg(\"#DBG === Attempting tuple seeded with\",", "] : if cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot,", "completeCase( columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG *** Rolling back \",", "print_(\"%15s\" % value , end=\"\") print_( \"\" ) print_( \"\" ) def PrintAsCSV(columns):", "Exclude is a dictionary mapping items to lists of item. # import sys", "in three different data structures, # for different forms of quick access: #", "optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\", dest=\"output_format\", const = \"csv\", help = \"\"\"Output format is", "initial test suite. ## def print_required_pairs( ) : for ob in Outstanding :", "to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \",", "mismatch with initial suite \", initial_suite) print_(\"*** Expecting columns \", in_schema , \"", "the distribution. 
* Neither the name of the University of Oregon nor the", ": return CategoryToken if tok == \"if\" : return IfToken if tok ==", "the pair is a <slot,value> or <name,value> pair def slotOf( tuple ): return", "of condition names ValueIfs = [ ] # List of (value, slot, condition)", "to list of condition names ValueIfs = [ ] # List of (value,", "*** Success: \", testcase) return True dbg_p(\"#DBG * Attempting to complete\", testcase )", "].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\")", "class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect def initial_suite_clear( initial_suite )", "= six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"except\" , condname)] +", "return tuple[0] def nameOf( tuple ): return tuple[0] def valOf( tuple ): return", "while seedObligation not in Outstanding: if (len(ObsList) == 0): return seedObligation = ObsList.pop()", "an existing element. We'll only consider *added* value, # so we score the", "in an initial test suite. ## def print_required_pairs( ) : for ob in", "this software, even if advised of the possibility of such damage. 
\"\"\" usage", "we # clip from the end of the list dbg_p(\"#DBG * Lazy deletion\")", "array generators, the outer # loop will generate test cases, and the inner", "= {} # Per column, both directions SingleColumns = [ ] # Columns", "a test specification, and report which pairs of values have not # been", "== \"prop\" : return PropToken if tok == \"except\" : return ExceptToken if", "with initial suite \", initial_suite) print_(\"*** Expecting columns \", in_schema , \" but", "rest of file\") return [ ] Token = tokenStream.next() print_(\"Resuming from\" , Token)", "import print_ ## Logging # import logging logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING) Log = logging.getLogger(__name__) #", "software, even if advised of the possibility of such damage. \"\"\" usage =", "but only one direction ObsByCol = {} # Per column, both directions SingleColumns", "= 0 # ==len(CategoriesList), set after parsing ## Temporary, for building excludes PropsSlots", "if tokenClass( Token ) == ErrorToken : Token = six.next(tokenStream) return [(\"error\", None", "as above, and then produce a test suite that # covers the missing", "conditions = parseConditions() dbg(\"#DBG parseValue returns\", value + conditions) return value + conditions", "False reader = csv.reader( open(initial_suite, \"r\"), csv_dialect) ## Working yet? (No.) ## First", ": if len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \", testcase) return", "cond in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value))", "\", columnOrder, \" in \", testcase) # How shall we fill this DontCare", "value = value + 1 candidates.append( (value, ob) ) obindex = obindex +", "(the two items must not occur together in any case) # A case", "(value, slot, condition) triples ## What we build Suite = [ ] ##", "# plus a test specification, and report which pairs of values have not", "useful? # Let's try for an outstanding obligation. 
# Dec 2006 --- Let's", "------------------------------------------------------------ # -- Respond to special diagnostic options -- if UserOptions.license: print_(License) exit(0)", "a simple LL parser ---- # Consts for token classification EOF = \"<EOF>\"", "help=\"\"\"Include only categories with more than one non-error and non-single value\"\"\") optparser.add_option(\"-s\", \"--singles\",", "testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"*** Value", "dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot] = value if completeCase( columnOrder,", "column (category) # A test suite is a list of test cases #", "valOf(cond) if kind == \"prop\" : CategoriesProps[ category ].append(condVal) ValueProps[ (slotNum, val )", "testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase", "previous values testcase[ s1 ] = old_v1 testcase[ s2 ] = old_v2 ##", "\", candidates) for cand in candidates: (score, ((s1, v1),(s2,v2))) = cand old_v1 =", "is fairly expensive # (10^20 takes about 9 minutes wall time on G4", "from \"if\" clauses --- reverse sense for IfCond in ValueIfs : val, slot,", "else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if not singleton: vlist.append( val )", "of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1", "if len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success: \", testcase) return True", "a pair (item, item), that is, ((slot, value), (slot, value)) # An obligation", "testcase ) else: CaseMessage( \"Warning - No pair possible: \", testcase ) def", "promote products derived from this software without specific prior written permission. 
This software", "Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec file using a simple LL", "DontCare: dbg_p(\"#DBG * Skipping column \", col, \" (already filled in)\") return completeCase(", "in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value = value +", "CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \", i, \"'\"", "generation, and # we can save space in output by suppressing them. #", "-- for v2 in CategoriesValues[j] : j_item = (j, v2) obforward = (i_item,", "# set a limit (maxCandidates) on number of candidates considered colObs = ObsByCol[col]", "value\"\"\") optparser.add_option(\"-s\", \"--singles\", \"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error'", "tokens, one by one # def getToken() : while 1: s = sys.stdin.readline()", "occur together in any case) # A case is a list (array) with", "values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce test cases", "and testcase[slot] != val) : return False for tslot in range(len(testcase)) : if", "\"--singles-only\", action=\"store_false\", default=True, dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and 'single' values.\"\"\")", "clauses --- reverse sense for IfCond in ValueIfs : val, slot, cond =", "\" obligations ---\") # When we complete a test case, we remove obligations", "\"except\" : return ExceptToken if tok == \"single\" : return SingleToken if tok", "\"<PASSWORD>\" tokenStream = getToken() def parse(): global Token global NCol Token = six.next(tokenStream)", "= \"\"\"Output format is comma-separated-values (suitable as input to Excel and other spreadsheets,", "in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot, cs_value)) def", "== EOF : 
return [ ] if tokenClass( Token ) != CategoryToken :", "all but one value is # listed as a \"single\" or \"error\" choice,", "if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \", col, \" (already filled", "1 if testcase[s2] != v2 : for ccol in range( len(testcase) ): if", "# clip from the end of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex]", "list so we don't have an unfortunate ordering. # Outstanding is a set", "processing ## may miss other features, including other special cases) ## * Not", "consider any pairs as being satisfied by a single ## or error case.", "print_(\"=====================================\") if UserOptions.combinations : while len(ObsList) > 0 : CreateCase() if UserOptions.varying :", "EOF : print_(\"Discarding rest of file\") return [ ] Token = tokenStream.next() print_(\"Resuming", "incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of", "we can save space in output by suppressing them. # (Note they may", "Suite : dbg(\"write row \" , t ) csv_writer.writerow( t ) # ----------------", "return [(\"single\", None)] + parseConditions() if tokenClass( Token ) == IfToken : Token", "suite is a list of test cases # An item is a tuple,", "pair (obligation or exclusion) ----- def makePair( s1, v1, s2, v2 ): return", "with n columns # # Representations: # A test case is represented as", "(not symmetric) ObsList = [ ] # All obligations, but only one direction", ": return ExceptToken if tok == \"single\" : return SingleToken if tok ==", "look at all the outstanding obligations # and choose the one with highest", "= in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema", "element. 
We'll only consider *added* value, # so we score the *new* parts", "six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token", "ValueToken : print_(\"Syntax error, expecting value, saw \", Token ) return [ \"--bogus--\"]", "j in SingleColumns: continue ## ## --- short cut doesn't work if only", "= s[0:commentPos] for word in s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word,", "# List of (value, slot, condition) triples ValueExcepts = [ ] # List", "for parsing) DontCare = \"_\" ## Configuration parameters DBG = False ## Debugging", "testCaseValue = 0 for i in range( len(testcase) ): for j in range", "save some time by # always fixing these at the beginning of pairs", "covered: python genpairs.py --csv --initial-suite tests.txt -o -v -p < foo.cp # To", "((slot,value),(slot,value)) (not symmetric) ObsList = [ ] # All obligations, but only one", "# -------------- The form of a pair (obligation or exclusion) ----- def makePair(", "-p\"\"\") optparser.add_option(\"-p\", \"--pairs\", \"--print-pairs\", action=\"store_true\", default=False, dest=\"pairs\", help=\"\"\"Report pairs not covered by initial", "dbg(\"#DBG No more conditions\") return [ ] # -------------- The form of a", "for slot in columns : value = t[slot] print_(\"%15s\" % value , end=\"\")", "csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ] for slot in", "# for different forms of quick access: # ObsList is a list of", "): ob = makePair(i, testcase[i], j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue", "UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug)", "participate in excludes.) 
# # We'll identify the multiples (non-single columns) as well,", "IfToken if tok == \"prop\" : return PropToken if tok == \"except\" :", "(the two items must occur together in some case) # An exclusion is", "s2 ] testcase[ s2 ] = v2 if completeCase( columnOrder[1:] , testcase ):", "ObsList is a list of obligations, some of which may # already have", "to see what is missing in an initial test suite. ## def print_required_pairs(", "category ].append(condVal) ValueProps[ (slotNum, val ) ].append(condVal) if condVal not in PropsSlots :", "list of conditions and the following disclaimer. * Redistributions in binary form must", "PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\") for", "an initial test suite. ## def print_required_pairs( ) : for ob in Outstanding", "CaseMessage( \"Warning - No pair possible: \", testcase ) def CreateSingles(): for single", "warranties of merchantability and fitness for a particular purpose are disclaimed. 
In no", "both directions SingleColumns = [ ] # Columns with just one (non-error, non-single)", "and report which pairs of values have not # been covered: python genpairs.py", "singleton singleton = False ValueProps[ (slotNum, val) ] = [] ## List of", "pairs generation, and # we can save space in output by suppressing them.", "EOF : return [ ] if tokenClass( Token ) != CategoryToken : print_(\"Syntax", "--initial-suite tests.txt < foo.cp \"\"\" # # An item is a pair (slot", "[ Token ] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\", value", "# calling makeObligations # def makeExcludes() : # Excludes that come from \"except\"", "vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\" for col in range(len(vector)) : if", "] ## List of category names (in order given) ## The CategoriesList can", "of test cases # An item is a tuple, and an itempair is", "PropToken : Token = six.next(tokenStream) condname = Token Token = six.next(tokenStream) return [(\"prop\"", "# # We'll identify the multiples (non-single columns) as well, # because they", "error case. ## For now, we just assume that the initial test suite", "UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs)", "csv_dialect) ## Working yet? (No.) ## First line should be schema in_schema =", "t[s2]=v2 return t # ------------------------------------------------------------ # Print results # ------------------------------------------------------------ def PrintTable( columns,", "default=False, dest=\"varying\", help=\"\"\"Include only categories with more than one non-error and non-single value\"\"\")", "particular purpose are disclaimed. 
In no event shall the copyright owner or contributors", "= six.next(tokenStream) return [(\"if\" , ifcond)] + parseConditions() if tokenClass( Token ) ==", "or off (false) DBGp = False ## Performance debugging, December 2006 maxCandidates =", "on excludes, so call makeExcludes before # calling makeObligations # def makeExcludes() :", "This is fairly expensive # (10^20 takes about 9 minutes wall time on", "that # covers the missing pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp", "condname = Token Token = six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if", "= value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value +", "## fill in some compatible value and move on? dbg_p(\"#DBG *** Trying any", "(parseValue, looking at \", Token, \")\") if tokenClass( Token ) != ValueToken :", "* Redistributions in binary form must reproduce the above copyright notice, this list", ") def PrintAsCSV(columns): \"\"\" Print vectors as comma-separated values, for import into a", "action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\")", "any pairs as being satisfied by a single ## or error case. ##", "= [] ## List of its properties for cond in valDesc[1:] : kind", "obligation to vector for debugging messages\"\"\" t = MakeTuple( NCol ) s1,v1 =", "= { } # For each property name, set of slots with it", "other CSV-consuming application. 
\"\"\" dbg(\"Print as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel )", "== ErrorToken : Token = six.next(tokenStream) return [(\"error\", None )] + parseConditions() if", "form of a pair (obligation or exclusion) ----- def makePair( s1, v1, s2,", "those in which all but one value is # listed as a \"single\"", "cond not in ValueProps[ (conflict_slot, cs_value) ] : Excludes.add( makePair( slot, val, conflict_slot,", "columns) as well, # because they are useful in several places # def", "consider *added* value, # so we score the *new* parts only. value =", "on? dbg_p(\"#DBG *** Trying any value, regardless of obligation\") for val in CategoriesValues[", "all the outstanding obligations # and choose the one with highest score. This", "(val, slotNum, condVal ) ) elif kind == \"except\" : ValueExcepts.append( (val, slotNum,", "with\", testcase) columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder,", "testcase[s1] = v1 testcase[s2] = v2 for slot in SingleColumns : testcase[slot] =", "laptop), so now we # set a limit (maxCandidates) on number of candidates", "v2 if completeCase( columnOrder[1:] , testcase ): return True else: dbg_p(\"#DBG *** Rolling", "\", category, \" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category ) vlist", ": val = valDesc[0] ## The name of the value itself ## Postpone", "conflict_slot, cs_value)) # Excludes that come from \"if\" clauses --- reverse sense for", ": parm = CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60)", "always fixing these at the beginning of pairs generation, and # we can", "value sets Singles = [] ## List of (slot,value,kind) where kind is \"single\"", "value itself ## Postpone marking val as a possible value of the property", "( testcase[ slot ] != DontCare and testcase[slot] != val) : return False", "tuple # # Like AETG and several other covering array generators, the outer", 
"UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the script, execute, print", "if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList =", "substitute goods or services; loss of use, data, or profits; or business interruption)", "keys = CategoriesList nslots = len(keys) for i in range(nslots): ObsByCol[i] = []", "for val in CategoriesValues[ col ] : if compatible((col,val), testcase) : testcase[ col", "in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations( trvec ) else: print_(\"*** Warning,", "singletons. We should look at single and error ## cases first, and ##", "of the possibility of such damage. \"\"\" usage = \"\"\"Usage: # To read", ": print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures CategoriesList = [ ]", "\", s1, s2) # Restore previous values testcase[ s1 ] = old_v1 testcase[", "No pair possible: \", testcase ) def completeCase( columnOrder, testcase ) : if", "= six.next(tokenStream) parseSpec() NCol = len(CategoriesList) def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if", "# def clearObligations(testcase) : testCaseValue = 0 for i in range( len(testcase) ):", "descriptive_title ) def PrintAsText( columns, descriptive_title ): print_(descriptive_title + \":\", len(Suite), \" test", "\"\"\"Output format is comma-separated-values (suitable as input to Excel and other spreadsheets, genpairs", "): print_(descriptive_title + \":\", len(Suite), \" test vectors\") print_(\"\") for slot in columns", "the copyright owner or contributors be liable for any direct, indirect, incidental, special,", "\"\"\"Print a warning or error message concerning a particular partially-defined test vector\"\"\" print_(", "= six.next(tokenStream) return [(\"prop\" , condname)] + parseConditions() if tokenClass( Token ) ==", "v2 = ob[1] 
name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------", "for import into a spreadsheet or other CSV-consuming application. \"\"\" dbg(\"Print as CSV\")", "def parseSpec(): global Token dbg(\"#DBG (parseSpec)\") if Token == EOF : return [", "this software without specific prior written permission. This software is provided by the", "retain the above copyright notice, this list of conditions and the following disclaimer.", "help=\"\"\"Print only test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True,", "\", Token ) return [ \"--bogus--\"] value = [ Token ] Token =", "CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\") print_(\"\") print_(\"_\"*60) for t in", "be fixed. We can save some time by # always fixing these at", "for i in MultipleColumns : for v1 in CategoriesValues[i] : i_item = (i,", "of candidates considered colObs = ObsByCol[col] candidates = [ ] obindex = 0", "for valDesc in values : val = valDesc[0] ## The name of the", "<slot,value> or <name,value> pair def slotOf( tuple ): return tuple[0] def nameOf( tuple", "slot in range(len(CategoriesList)) : if len(CategoriesValues[slot]) == 0 : print_(\"Warning: No non-singular value", "'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial test", "# ---------------- ## Read an initial test suite (or several), and ## eliminate", "] : for cs_value in CategoriesValues[ conflict_slot ] : if cond in ValueProps[", ") random.shuffle(columnOrder) value, slot, kind = single dbg(\"#DBG single obligation: \", slot, value,", "slot, condition) triples ## What we build Suite = [ ] ## List", "parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass(", "- No pair possible: \", testcase ) def 
completeCase( columnOrder, testcase ) :", "Token ) == SingleToken : Token = six.next(tokenStream) return [(\"single\", None)] + parseConditions()", "takes about 9 minutes wall time on G4 laptop), so now we #", "== \"if\" : ValueIfs.append( (val, slotNum, condVal ) ) elif kind == \"except\"", "so we are creating ## a test suite to fill in the remainder", "in some compatible value and move on? dbg_p(\"#DBG *** Trying any value, regardless", "value, kind) testcase[slot] = value if completeCase( columnOrder, testcase ) : Suite.append( testcase", "dictionary obligations by column, also updated lazily. # # Exclude is a dictionary", "s1 ] testcase[ s1 ] = v1 old_v2 = testcase[ s2 ] testcase[", "with each test case. # # Data structures: # We will record obligations", "test vector in human-readable # format: python genpairs.py < foo.cp # To read", "reserved. Redistribution and use in source and binary forms, with or without modification,", "output by suppressing them. # (Note they may still participate in excludes.) #", "if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \"", "values, for import into a spreadsheet or other CSV-consuming application. \"\"\" dbg(\"Print as", "\"--license\", help=\"Print license terms (and then quit)\", action=\"store_true\",default=False, dest=\"license\") optparser.add_option(\"--csv\", \"-c\", \"--comma-separated-values\", action=\"store_const\",", "are # cleared lazily, when we bring up an obligation. # def clearObligations(testcase)", "should be schema in_schema = reader.next() in_schema_map = [ ] for i in", "category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ] = [ ]", "the obligations still outstanding. 
# ObsByCol is a dictionary obligations by column, also", "tokenClass( Token ) == PropToken : Token = six.next(tokenStream) condname = Token Token", ": ## if j in SingleColumns: continue ## ## --- short cut doesn't", "1 if ((ccol,testcase[ccol]),(s1,v1)) in Outstanding : value = value + 1 if testcase[s2]", "other special cases) ## * Not consider any pairs as being satisfied by", "s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 return t # ------------------------------------------------------------ # Print results #", "i in range(len(vec)) : if in_schema_map[i] != -1 : trvec[in_schema_map[i]] = vec[i] clearObligations(", "suite of test cases (tests.txt) in CSV format, # plus a test specification,", "test ## obligations. ## ## NOTE: Currently considering only pair obligations, ## not", "-- if UserOptions.license: print_(License) exit(0) if UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug:", "name, set of slots with it CategoriesProps = { } # For each", "CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList) )", "structures CategoriesList = [ ] ## List of category names (in order given)", "print_(\"Discarding rest of file\") return [ ] Token = tokenStream.next() print_(\"Resuming from\" ,", "optparser.add_option(\"-d\", \"--debug\", help=\"Print a lot of debugging messages\", action=\"store_true\", default=False, dest=\"debug\") optparser.add_option(\"-l\", \"--license\",", "six.next(tokenStream) return [(\"error\", None )] + parseConditions() if tokenClass( Token ) == SingleToken", "Constants (other than tokens for parsing) DontCare = \"_\" ## Configuration parameters DBG", "action=\"store_true\", default=False, dest=\"varying\", help=\"\"\"Include only categories with more than one non-error and non-single", "of such damage. 
\"\"\" usage = \"\"\"Usage: # To read a specification (foo.cp)", "] for i in range(len(in_schema)): col = in_schema[i] if col in CategoriesList: to_col", "Outstanding.add(obforward) ObsByCol[ i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList),", "= \"\"\"Usage: # To read a specification (foo.cp) and print the test vector", "and binary forms, with or without modification, are permitted provided that the following", "as CSV\") csv_writer = csv.writer( sys.stdout, dialect=csv.excel ) schema_row = [ ] for", "---------------- ## Read an initial test suite (or several), and ## eliminate those", "---- # Single columns are those in which all but one value is", "triples ValueExcepts = [ ] # List of (value, slot, condition) triples ##", "the *new* parts only. value = 1 ## For at least meeting one", "* Not consider any test case with more than one ## single or", "of special and error cases. ## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have", "for word in s.split() : dbg(\"#DBG <<%s: %s>>\" % ( word, tokenClass(word) )", ", \" with \", testcase) return False # ------------------------------------------------------------ # Print Warnings (to", "single in Singles: CreateSingle(single) def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) )", ") : \"\"\"Convert obligation to vector for debugging messages\"\"\" t = MakeTuple( NCol", "def CreateSingle( single ): testcase = MakeTuple( len(CategoriesList) ) columnOrder = list(range( len(CategoriesList)", "a dictionary obligations by column, also updated lazily. 
# # Exclude is a", "in_schema[i] if col in CategoriesList: to_col = CategoriesList.index(col) in_schema_map.append(to_col) else: print_(\"Warning: schema mismatch", "UserOptions.pairs : print_(\"=== Pairs required for completion ===\" ) print_required_pairs() print_(\"=====================================\") if UserOptions.combinations", "j_item = (j, v2) obforward = (i_item, j_item) obbackward = (j_item, i_item) if", "obligations. Typical use is when ## we are trying to see what is", "---------- Read spec file using a simple LL parser ---- # Consts for", "\"<IF>\" PropToken = \"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\"", "shuffling lists import csv ## for reading and writing test suites ## Constants", "than tokens for parsing) DontCare = \"_\" ## Configuration parameters DBG = False", "'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do not produce test", "return False return True # --------------------------------------------------------- def MakeTuple ( len ): newList =", "from the end of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[", "try for an outstanding obligation. 
# Dec 2006 --- Let's look at all", "(parseValues)\") values = [ ] while tokenClass( Token ) == ValueToken : val", "getToken() def parse(): global Token global NCol Token = six.next(tokenStream) parseSpec() NCol =", "vec in reader: if len(vec) == len(in_schema) : trvec = MakeTuple(len(CategoriesList)) for i", "def tokenClass( tok ) : if tok == EOF : return EOFToken if", "are useful in several places # def identifySingles() : for slot in range(len(CategoriesList))", "+ parseConditions() if tokenClass( Token ) == PropToken : Token = six.next(tokenStream) condname", "of liability, whether in contract, strict liability, or tort (including negligence or otherwise)", "before # calling makeObligations # def makeExcludes() : # Excludes that come from", "else: MultipleColumns.append(slot) # Obligations depend on excludes, so call makeExcludes before # calling", "testcase = MakeTuple( len(CategoriesList) ) testcase[s1] = v1 testcase[s2] = v2 for slot", "in columns : parm = CategoriesList[ slot ] print_(\"%15s\" % parm , end=\"\")", "\" in \", testcase) # How shall we fill this DontCare with something", "pair[1], pair[0] ) # Each item in the pair is a <slot,value> or", "owner or contributors be liable for any direct, indirect, incidental, special, exemplary, or", "including, but not limited to, the implied warranties of merchantability and fitness for", "cond in valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond) if kind ==", "is our lazy deletion of obligations; we # clip from the end of", "the # Note one (but not both) of these may coincide with #", "getToken() : while 1: s = sys.stdin.readline() if not s: dbg(\"#DBG <<EOF reached>>\")", "else: print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \", i, \"'\" + col", "obligations still outstanding. 
# ObsByCol is a dictionary obligations by column, also updated", "testcase[ s2 ] = old_v2 ## If we couldn't score any more obligations,", "[ \"--bogus--\"] value = [ Token ] Token = six.next(tokenStream) conditions = parseConditions()", "endorse or promote products derived from this software without specific prior written permission.", "columnOrder = list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase )", "Python 2 and 3 compatibility from six import print_ ## Logging # import", "single dbg(\"#DBG single obligation: \", slot, value, kind) testcase[slot] = value if completeCase(", "= { } # Map (slot,value) pair to list of condition names ValueIfs", "attribute:\", cond) if not singleton: vlist.append( val ) parseSpec() def parseValues(): global Token", "pair is a <slot,value> or <name,value> pair def slotOf( tuple ): return tuple[0]", "list. The other lists are # cleared lazily, when we bring up an", "University of Oregon nor the names of its contributors may be used to", "print_(\"Warning: schema mismatch in\", initial_suite) print_(\" Column \", i, \"'\" + col +", "Not consider any test case with more than one ## single or error", "dbg(\"--- ObsList complete, \", len(ObsList), \" obligations ---\") # When we complete a", "CreateCase(): seedObligation = ObsList.pop() while seedObligation not in Outstanding: if (len(ObsList) == 0):", "many test obligations as possible with each test case. 
# # Data structures:", "\", len(ObsList), \" obligations ---\") # When we complete a test case, we", "col in range(len(vector)) : if vector[col] == DontCare : print_(sep+\"_\",end=\"\", file=dest) else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]),", "than one ## single or error value (we don't know which will be", "for x in msg ] msg_string = \" \".join(parts) Log.debug(msg_string) # Performance debug", "all props on any values ValueProps = { } # Map (slot,value) pair", "CaseMessage( \"Warning - No pair possible: \", testcase ) def completeCase( columnOrder, testcase", "in the pair is a <slot,value> or <name,value> pair def slotOf( tuple ):", "# To read the same as above, and then produce a test suite", "UserOptions.debug: print_(\"---------------------------\") print_(\"Options in effect: \") print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying)", "# loop will generate test cases, and the inner loops try to fulfill", "obindex < len(colObs) and len(candidates) < maxCandidates : ob = colObs[obindex] if not", ", end=\"\") print_(\"\") print_(\"_\"*60) for t in Suite : for slot in columns", ") == ValueToken : val = parseValue() dbg(\"#DBG (parsed value: \", val, \")\")", "fixing these at the beginning of pairs generation, and # we can save", "services; loss of use, data, or profits; or business interruption) however caused and", "## for file handling import random ## for shuffling lists import csv ##", "print_(\"debug: \", UserOptions.debug) print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite)", "obligations from # the outstanding obligations list. 
The other lists are # cleared", "and other spreadsheets, genpairs with the -i option, and some other programs).\"\"\") optparser.add_option(\"-v\",", "< len(colObs) and len(candidates) < maxCandidates : ob = colObs[obindex] if not (ob", "kind is \"single\" or \"error\" Excludes = set() ## Set of ((slot,value),(slot,value)) (not", "produce a test suite that # covers the missing pairs: python genpairs.py --csv", "and any express or implied warranties, including, but not limited to, the implied", "\" ::= \", values) slotNum = len(CategoriesList) CategoriesList.append( category ) vlist = [", "For now, we just assume that the initial test suite is not ##", "CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a warning or error message concerning", "genpairs.py < foo.cp # To read a partial suite of test cases (tests.txt)", "a set of all the obligations still outstanding. # ObsByCol is a dictionary", "j_item) obbackward = (j_item, i_item) if obforward not in Excludes and obbackward not", "and use in source and binary forms, with or without modification, are permitted", "# Complement of SingleColumns -- pairs are from these NCol = 0 #", "i ].append(obforward) ObsByCol[ j ].append(obbackward) random.shuffle(ObsList) dbg(\"--- ObsList complete, \", len(ObsList), \" obligations", "Lazy deletion\") colObs[obindex] = colObs[ len(colObs) - 1 ] colObs.pop() else: if compatible(ob[0],", "False if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True # --------------------------------------------------------- def", "Token ) == ValueToken : val = parseValue() dbg(\"#DBG (parsed value: \", val,", "processing: Parse the script, execute, print -- parse() identifySingles() makeExcludes() makeObligations() for suite", "liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising", "kind) ) singleton = True else : print_(\"*ERR* Unrecognized condition attribute:\", cond) if", ") # Score the # Note one (but not both) of 
these may", "comma-separated-values (suitable as input to Excel and other spreadsheets, genpairs with the -i", "= \"csv\", help = \"\"\"Output format is comma-separated-values (suitable as input to Excel", "considered colObs = ObsByCol[col] candidates = [ ] obindex = 0 while obindex", ": print_(\"Warning: No non-singular value choices for \", CategoriesList[slot], \"; Pairs generation will", "testcase) # How shall we fill this DontCare with something useful? # Let's", "suite is not ## a suite of special and error cases. ## class", "print_(\"\") print_(\"_\"*60) for t in Suite : for slot in columns : value", "Data structures: # We will record obligations in three different data structures, #", "tok.endswith(\":\") : return CategoryToken if tok == \"if\" : return IfToken if tok", "for j in range ( i+1, len(testcase) ): ob = makePair(i, testcase[i], j,", "--- Let's look at all the outstanding obligations # and choose the one", "ob[0] name1=CategoriesList[s1] s2, v2 = ob[1] name2=CategoriesList[s2] print_(\"%s=%s, %s=%s\" % (name1, v1, name2,", "is a pair (the two items must occur together in some case) #", "value: \", val, \")\") values.append( val ) return values def parseValue(): global Token", "j, testcase[j]) if ob in Outstanding: Outstanding.remove(ob) testCaseValue = testCaseValue + 1 dbg(\"***", "any value, regardless of obligation\") for val in CategoriesValues[ col ] : if", "= getToken() def parse(): global Token global NCol Token = six.next(tokenStream) parseSpec() NCol", "for ccol in range( len(testcase) ): if ((s1,v1),(ccol,testcase[ccol])) in Outstanding : value =", "cases. 
## class csv_dialect(csv.excel): skipinitialspace=True ## Seems to have no effect def initial_suite_clear(", "ValueExcepts.append( (val, slotNum, condVal) ) elif kind == \"error\" or kind == \"single\"", "end of the list dbg_p(\"#DBG * Lazy deletion\") colObs[obindex] = colObs[ len(colObs) -", "else: print_(\"*** Warning, format mismatch with initial suite \", initial_suite) print_(\"*** Expecting columns", "Working yet? (No.) ## First line should be schema in_schema = reader.next() in_schema_map", "inner loops try to fulfill # as many test obligations as possible with", "tests.txt < foo.cp \"\"\" # # An item is a pair (slot number,", "Print results # ------------------------------------------------------------ def PrintTable( columns, descriptive_title ) : if UserOptions.output_format ==", "in range ( i+1, len(testcase) ): ob = makePair(i, testcase[i], j, testcase[j]) if", "rights reserved. # License = \"\"\" (C) 2007,2017 University of Oregon and <NAME>.", "obligations, but only one direction ObsByCol = {} # Per column, both directions", "len(colObs) and len(candidates) < maxCandidates : ob = colObs[obindex] if not (ob in", "): return tuple[0] def nameOf( tuple ): return tuple[0] def valOf( tuple ):", "the test vector in human-readable # format: python genpairs.py < foo.cp # To", "= len(CategoriesList) CategoriesList.append( category ) vlist = [ ] CategoriesValues.append(vlist) CategoriesProps[ category ]", "# For each category, all props on any values ValueProps = { }", "UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) print_(\"---------------------------\") # -- Main processing: Parse the script,", "category ] = [ ] for valDesc in values : val = valDesc[0]", "condname)] + parseConditions() dbg(\"#DBG No more conditions\") return [ ] # -------------- The", "of obligation\") for val in CategoriesValues[ col ] : if compatible((col,val), testcase) :", "s1,v1 = ob[0] s2,v2 = ob[1] t[s1]=v1 t[s2]=v2 
return t # ------------------------------------------------------------ #", "only test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\",", "= tokenStream.next() print_(\"Resuming from\" , Token) category = Token[0:-1] Token = six.next(tokenStream) values", "DBG: print_(\"--- Creating obligations list ---\") keys = CategoriesList nslots = len(keys) for", "col ] = DontCare dbg_p(\"#DBG ** Failing to fill column \", col ,", "1 candidates.append( (value, ob) ) obindex = obindex + 1 candidates.sort() candidates.reverse() dbg_p(\"###", "parts only. value = 1 ## For at least meeting one obligation ((s1,", "# ------------------------------------------------------------ def CaseMessage( msg, vector, dest=sys.stderr ) : \"\"\"Print a warning or", "for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding : value =", "== ValueToken : val = parseValue() dbg(\"#DBG (parsed value: \", val, \")\") values.append(", "MAIN PROGRAM (after initialization above) ## ------------------------------------------------------------ # -- Respond to special diagnostic", "optparser.parse_args() Log.info(\"User options: \", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ##", "random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) ) : Suite.append( testcase ) clearObligations(", "two items must not occur together in any case) # A case is", "parser ---- # Consts for token classification EOF = \"<EOF>\" CategoryToken = \"<CAT>\"", "some compatible value and move on? 
dbg_p(\"#DBG *** Trying any value, regardless of", "if commentPos >= 0 : s = s[0:commentPos] for word in s.split() :", "= \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF def tokenClass(", ": print_(\"Syntax error, expecting value, saw \", Token ) return [ \"--bogus--\"] value", "+ 1 candidates.append( (value, ob) ) obindex = obindex + 1 candidates.sort() candidates.reverse()", "foo.cp # To read the same as above, and then produce a test", "multiples (non-single columns) as well, # because they are useful in several places", "have no effect def initial_suite_clear( initial_suite ) : matches = False reader =", "Token ) return [ \"--bogus--\"] value = [ Token ] Token = six.next(tokenStream)", "Build initial data structures ---- # Single columns are those in which all", "not in Excludes and obbackward not in Excludes: ObsList.append(obforward) Outstanding.add(obforward) ObsByCol[ i ].append(obforward)", "the use of this software, even if advised of the possibility of such", "-o -v -p < foo.cp # To read the same as above, and", "a limit (maxCandidates) on number of candidates considered colObs = ObsByCol[col] candidates =", "testcase) and compatible(ob[1], testcase): dbg_p(\"#DBG *** Compatible\", ob, testcase ) # Score the", "if ((tslot, testcase[tslot]),(slot,val)) in Excludes: return False return True # --------------------------------------------------------- def MakeTuple", ") if UserOptions.singles : Suite = [ ] CreateSingles() PrintTable( range(len(CategoriesList)), \"Single and", "cases ## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec file using a", "columnOrder, testcase ) : if len (columnOrder) == 0 : dbg_p(\"#DBG: *** Success:", "print_(\"output_format:\", UserOptions.output_format) print_(\"varying:\", UserOptions.varying) print_(\"combinations:\", UserOptions.combinations) print_(\"singles:\", UserOptions.singles) print_(\"initial_suite:\", UserOptions.initial_suite) print_(\"pairs:\", UserOptions.pairs) 
print_(\"---------------------------\")", "being satisfied by a single ## or error case. ## For now, we", "List of (value, slot, condition) triples ## What we build Suite = [", "\"single\" : Singles.append( (val, slotNum, kind) ) singleton = True else : print_(\"*ERR*", "\"single\" : return SingleToken if tok == \"error\" : return ErrorToken return ValueToken", "Column \", i, \"'\" + col + \"'\", \"not in specification\") in_schema_map.append(-1) for", "looking for 'category:'\") print_(\"Skipping to next category\") ## Error recovery to next category", "ValueIfs : val, slot, cond = IfCond for conflict_slot in PropsSlots[ cond ]", "else: print_(\"{}{}={}\".format(sep,CategoriesList[col],vector[col]), end=\"\", file=dest) sep=\", \" print_(\"]\",file=dest) def ObToVector( ob ) : \"\"\"Convert", "dest=\"combinations\", help=\"\"\"Print only test cases covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\",", "s2 ] = old_v2 ## If we couldn't score any more obligations, can", "Token dbg(\"#DBG (parseValues)\") values = [ ] while tokenClass( Token ) == ValueToken", "come from \"except\" clauses for ExceptCond in ValueExcepts : val, slot, cond =", "obligations. ## ## NOTE: Currently considering only pair obligations, ## not singletons. 
We", "testcase ) col = columnOrder[0] if testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column", "error value (we don't know which will be handled ## by the application,", "list ---\") keys = CategoriesList nslots = len(keys) for i in range(nslots): ObsByCol[i]", "## Instrumentation INSTR_N_Comparisons = 0 # ---------- Read spec file using a simple", "next category\") ## Error recovery to next category while tokenClass( Token ) !=", "or 'error' values.\"\"\") optparser.add_option(\"-i\", \"--initial\", \"--initial-suite\", action=\"append\", default = [], dest=\"initial_suite\", help=\"\"\"Read initial", "-- Respond to special diagnostic options -- if UserOptions.license: print_(License) exit(0) if UserOptions.debug:", "Token = six.next(tokenStream) return [(\"except\" , condname)] + parseConditions() dbg(\"#DBG No more conditions\")", "warranties, including, but not limited to, the implied warranties of merchantability and fitness", "covering 'error' and 'single' values.\"\"\") optparser.add_option(\"-o\", \"--omit-singles\", action=\"store_false\", default=True, dest=\"singles\", help = \"\"\"Do", "= [ Token ] Token = six.next(tokenStream) conditions = parseConditions() dbg(\"#DBG parseValue returns\",", "!= v2 : for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol])) in Outstanding", "## What we build Suite = [ ] ## List of test cases", "Rolling back \", s1, s2) # Restore previous values testcase[ s1 ] =", "of slots with it CategoriesProps = { } # For each category, all", "value + 1 if ((ccol,testcase[ccol]),(s2,v2)) in Outstanding : value = value + 1", "print_(\"%s=%s, %s=%s\" % (name1, v1, name2, v2)) ## ------------------------------------------------------------ ## MAIN PROGRAM (after", "## * Not consider any pairs as being satisfied by a single ##", "] != DontCare and testcase[slot] != val) : return False for tslot in", "def makeExcludes() : # Excludes that come from \"except\" clauses for ExceptCond in", ": if len(CategoriesValues[slot]) 
== 0 : print_(\"Warning: No non-singular value choices for \",", "if testcase[s2] != v2 : for ccol in range( len(testcase) ): if ((s2,v2),(ccol,testcase[ccol]))", ": slot, val = item if ( testcase[ slot ] != DontCare and", "vlist.append( val ) parseSpec() def parseValues(): global Token dbg(\"#DBG (parseValues)\") values = [", "*** Rolling back \", s1, s2) # Restore previous values testcase[ s1 ]", "whether it is a singleton singleton = False ValueProps[ (slotNum, val) ] =", "test suite is a list of test cases # An item is a", "cand old_v1 = testcase[ s1 ] testcase[ s1 ] = v1 old_v2 =", "if completeCase( columnOrder[1:], testcase ): return True else: testcase[ col ] = DontCare", "return True dbg_p(\"#DBG * Attempting to complete\", testcase ) col = columnOrder[0] if", "University of Oregon and <NAME>. All rights reserved. Redistribution and use in source", "= list(range( len(CategoriesList) ) ) random.shuffle(columnOrder) if ( completeCase( columnOrder, testcase ) )", "testcase[col] != DontCare: dbg_p(\"#DBG * Skipping column \", col, \" (already filled in)\")", "message concerning a particular partially-defined test vector\"\"\" print_( \"{} [\".format(msg), end=\"\", file=dest) sep=\"\"", "Set of ((slot,value),(slot,value)) (not symmetric) ObsList = [ ] # All obligations, but", "if j in SingleColumns: continue ## ## --- short cut doesn't work if", "## Seems to have no effect def initial_suite_clear( initial_suite ) : matches =", "call makeExcludes before # calling makeObligations # def makeExcludes() : # Excludes that", "of (slot,value,kind) where kind is \"single\" or \"error\" Excludes = set() ## Set", "value, saw \", Token ) return [ \"--bogus--\"] value = [ Token ]", "Read spec file using a simple LL parser ---- # Consts for token", "may miss other features, including other special cases) ## * Not consider any", "and choose the one with highest score. 
This is fairly expensive # (10^20", "\"<PROP>\" ExceptToken = \"<EXCEPT>\" ErrorToken = \"<ERROR>\" SingleToken = \"<SINGLE>\" EOFToken = EOF", "format mismatch with initial suite \", initial_suite) print_(\"*** Expecting columns \", in_schema ,", "\", UserOptions) if UserOptions.debug : print_(\"Enabling debugging\") DBG=True Log.setLevel(logging.DEBUG) ## Primary data structures", "columnOrder, testcase ) : Suite.append( testcase ) else: CaseMessage( \"Warning - No pair", "covers the missing pairs: python genpairs.py --csv --initial-suite tests.txt < foo.cp \"\"\" #", "suite \", initial_suite) print_(\"*** Expecting columns \", in_schema , \" but saw \",", "Performance debug messages def dbg_p(*msg): if DBGp: dbg(*msg) # ------------------------------------ ## User arguments", "is represented as a list, indexed by column (category) # A test suite", "# Excludes that come from \"if\" clauses --- reverse sense for IfCond in", "if DBG: print_(\"--- Creating obligations list ---\") keys = CategoriesList nslots = len(keys)", "\")\") values.append( val ) return values def parseValue(): global Token dbg(\"#DBG (parseValue, looking", "slot, value, kind) testcase[slot] = value if completeCase( columnOrder, testcase ) : Suite.append(", ": for cs_value in CategoriesValues[ conflict_slot ] : if cond not in ValueProps[", "testcase ) def completeCase( columnOrder, testcase ) : if len (columnOrder) == 0", "for cond in valDesc[1:] : kind = nameOf(cond) condVal = valOf(cond) if kind", "+ parseConditions() if tokenClass( Token ) == IfToken : Token = six.next(tokenStream) ifcond", "only pair obligations, ## not singletons. We should look at single and error" ]
[ "self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def", "Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\",", "players to generate the team output for :param limit: (default: None) just list", "a set of channel_ids on discord provided. :param channel_ids: the ids of the", "channel: the channel the message came through, i.e. team chat, general chat, etc.", "{}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a", ":param channel_list: the list of channels connected to the discord server :param player:", "channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic", "not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and", ":param vote: the vote itself, i.e. map change, kick player, etc. :param args:", "async def on_ready(self): \"\"\" Function called once the bot connected. Mainly displays status", "self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong", "# we found more than one matching member, let's tell the player about", "countdown, i.e. about to start. 
This function mainly updates the topics of the", "map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler called when", "def is_private_message(self, ctx): \"\"\" Checks whether a message was sent on a private", "self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True,", "at the beginning of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list =", "\"\"\" Handler called when a player connects. The method sends a corresponding message", "= matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in", "async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages from discord via", "need that, i.e. you did configured and of the qlx_discordReplaceMentions cvars as '0',", "minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e:", "str): \"\"\" Handles the authentication to the bot via private message :param ctx:", "None: return False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\"", "\"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\" Function called", "self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password: str): \"\"\" Handles", "game.map gametype = game.type_short.upper() # CAUTION: if you change anything on the next", "the original message came through :param message: the content of the message \"\"\"", "for 
ch in channel_list if ch.name.lower() == match.lower()] if len(channel) == 1: return", "player about this. if len(channel) > 1 and player is not None: player.tell(\"Found", "game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake live will", "game.type_short.upper() reply = \"{0} on **{1}** ({2}) with **{3}/{4}** players. {5}\".format( ginfo, Plugin.clean_text(maptitle),", "player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the discord bot to discord", "to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3 attempts for", "ctx): \"\"\" Checks whether a message was either sent in a configured relay", "member[0] # then try a direct match at the user's nickname member =", "self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat message, that might", "the given message should be filtered and not be sent to discord. :param", "with the same discord connected to. * qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to", "the top 5 scorers connected to the server in a string. The return", "if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return channel[0] # we found", "authentication for 5 minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication", "scorers with the scores and connection time to the server \"\"\" player_data =", "with **{3}/{4}** players. 
\".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper", "the channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids |", "Here the main interaction points either back to Quake Live or discord happen.", "f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles", "to forward, show the usage help text. if len(msg) < 2: return minqlx.RET_USAGE", "Command prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix", "vote :param vote: the vote itself, i.e. map change, kick player, etc. :param", "len(channel_ids) == 0: return # set the topic in its own thread to", "the matching channel, or None if none or more than one are found", "\"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author = author", "if len(member) == 1: return member[0] # then try a direct match at", "\"qlx\") command for authenticated users to execute server commands from discord * qlx_discordLogToSeparateLogfile", "Discord...\") self.connect_discord() return if len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from", "nicely in the Quake Live console. :param channel: the channel, the message came", "is None: previous_topic = topic # preserve the original channel's topic. position =", "* qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel ids for full relay.", "to execute server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging", "Bus Station server(s). 
You need to install discord.py in your python installation, i.e.", "you change anything on the next line, you may need to change the", "ctx): \"\"\" Checks whether a user is authed to the bot :param ctx:", "nothing. if message.author == self.discord.user: return # relay all messages from the relay", "if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is", ":param args: any arguments of the vote, i.e. map name, which player to", "continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\"", "ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids |", "through discord. Here the main interaction points either back to Quake Live or", "direct match for the whole name first member = [user for user in", "will be barred from authentication for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id]", "suffixes. Make sure to use single quotes for the suffixes. * qlx_discordCommandPrefix (default:", "the nick, if set member = [user for user in member_list if user.name.lower().find(match.lower())", "any arguments of the vote, i.e. 
map name, which player to kick, etc.", "-m pip install -U discord.py \"\"\" import re import asyncio import threading import", "len(player_list) == 0: return \"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if", "trigger :param msg: the original message the player sent (includes the trigger) :param", "client self.author = author self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name)", "status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to the Quake Live server\"))", "on the configured channels :param channel_ids: the set of channels to update the", "the set of channels to update the topic on :param topic: the topic", "certain commands in the relay and triggered channels as well as private authentication", ":param msg: the message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class", "player: the player that send to the trigger :param msg: the message the", "separated list of regular expressions for messages that should not be sent from", "id of the channel to get the topic from :return: the topic of", "score \"\"\" if len(player_list) == 0: return \"\" players_by_score = sorted(player_list, key=lambda k:", "channel ids for relaying team chat messages. 
* qlx_discordTriggeredChannelIds (default: \"\") Comma separated", "from the bot in the game console and server logfile, and sets the", "r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\",", "the topic set on discord channels. :param game: the game to derive the", "server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat,", "or len(channel_ids) == 0: return # set the topic in its own thread", "channel, the message came from. :param author: the author of the original message.", "\"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic of the provided channel", "by a space or at the beginning of the string matcher = re.compile(\"(?:^|", "channel (indicated by #channel-hint with a real mention :param message: the message to", "Live console. :param channel: the channel, the message came from. :param author: the", "\"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is", "and nick :param member_list: the list of members connected to the discord server", "]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice,", "(default: \"quakelive\") Message prefix for the trigger on triggered relay channels. 
* qlx_discordTriggerStatus", "return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply function to", "= self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and", "priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST)", "that was unloaded. \"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game):", "self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has", "and init the main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(),", "failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the game", "\"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a user is", "the topic suffix on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids &", "message in its own thread to avoid blocking of the server for channel_id", "player=None): \"\"\" replaces a mentioned discord user (indicated by @user-hint with a real", "not triggered relay channels configured, do nothing. 
if not channel_ids or len(channel_ids) ==", "if len(player_list) == 0: return \"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True)", "player: minqlx.Player, reason): \"\"\" Handler called when a player disconnects. The method sends", "enabled and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken (default:", "import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's", "matching discord channels for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in channel:", "is_filtered_message(self, msg): \"\"\" Checks whether the given message should be filtered and not", "as private authentication to the bot to admin the server. \"\"\" def __init__(self,", "completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents =", "top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler", "await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a message was sent", "__init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\",", "def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return", "to quake live will be prefixed with this prefix * qlx_discordEnableHelp (default: \"1\")", 
"message replaced by properly formatted user mentions \"\"\" if not self.is_discord_logged_in(): return message", "\"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a message was sent on a", "= discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites", "\"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to", ":param message: the message to replace the user mentions in :param player: (default:", "# try a direct channel name match case-sensitive first channel = [ch for", "original message. :param content: the message itself, ideally taken from message.clean_content to avoid", "then try a case-insensitive direct match with the channel name channel = [ch", "tech channel of the Bus Station server(s). You need to install discord.py in", "cvars as '0', you can leave it unchecked. By default, this will be", "sends a corresponding message to the discord relay channels. 
:param caller: the player", "mentions (@user and #channel) for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default:", "discord bot with commands and listeners on this pseudo cog class :param discord_bot:", "in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and msg[1]", "\"\"\" Checks whether a user is authed to the bot :param ctx: the", "The method sends a corresponding message to the discord relay channels, and updates", "in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix,", "found for the mentions used, this player is told what the alternatives are.", "\"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\")", "= message # this regular expression will make sure that the \"#channel\" has", "(i.e. replace '*' (asterisks) with a variant to not interfere with discord's formattings.)", "reveal more data about the server and its current game. :return: string of", "mapname, factory): \"\"\" Handler called when a map is changed. 
The method sends", "False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all", "if position != -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id]", "\"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author,", "relay a team_chat message, that might be hidden to the given channel :param", "__init__(self, client, author, discord_channel): self.client = client self.author = author self.discord_channel = discord_channel", "\"\"\" find a channel that matches the given match :param match: the match", "Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\",", "user.nick.lower() == match.lower()] if len(member) == 1: return member[0] # if direct searches", "You are free to modify this plugin to your own one, except for", "a private chat to the bot :param ctx: the context the trigger happened", "discord.py's :class:`DefaultHelpCommand`. 
\"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note", "console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod", "its interactions on the discord server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(),", "main interaction points either back to Quake Live or discord happen. :param message:", "not channel_ids or len(channel_ids) == 0: return # take the final 10 characters", "been successfully authenticated. \" \"You can now use {}{} to execute commands.\" .format(self.discord_command_prefix,", "msg): \"\"\" overwrites the player.tell function to relay messages to discord :param msg:", "\"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to", "logging for the discord library (logs to minqlx_discord.log in the homepath) \"\"\" def", "the vote :param vote: the vote itself, i.e. map change, kick player, etc.", "itself, i.e. map change, kick player, etc. :param args: any arguments of the", "triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic of", "alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces", "matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in [\"all\",", "for in the channel name :param channel_list: the list of channels connected to", "disconnects. 
The method sends a corresponding message to the discord relay channels, and", "channels :param channel_ids: the set of channels to update the topic on :param", "having the bot send the current status of the game server. * qlx_discordMessagePrefix", "self.game if game is None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic,", "that send to the trigger :param msg: the original message the player sent", "substitutions will happen. :return: the original message replaced by properly formatted user mentions", "kept topic suffixes and the related suffixes. Make sure to use single quotes", "await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function called once a", "the reason why the player left \"\"\" if reason in [\"disconnected\", \"timed out\",", "continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return False return not", "from authentication for 5 minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum", "we were not provided any channel_ids, do nothing. if not channel_ids or len(channel_ids)", "get the topic of the provided channel id :param channel_id: the id of", "message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content", "the bot via private message :param ctx: the context of the original message", "self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic of the provided", "discord ready text representation of the player's of that team by their score", "messages from the relay channels back to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids:", "topic should be set upon. :param topic: the new topic that should be", "max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format the current game.state that may", "be displayed nicely in the Quake Live console. :param channel: the channel, the", "== 0: return \"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if limit:", "id :param channel_id: the id of the channel to get the topic from", "channel_ids or len(channel_ids) == 0: return # send the message in its own", "players_by_score[:limit] team_data = \"\" for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score)", "# Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt)", "if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx):", "user (indicated by @user-hint with a real mention :param message: the message to", "= asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False,", "bot with commands and listeners on this pseudo cog class :param discord_bot: the", "return list(member)[0] # we found more than one matching member, let's tell the", "will be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between", "ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda", "about this. 
if len(channel) > 1 and player is not None: player.tell(\"Found ^6{}^7", "__repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply function", "class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond to from within minqlx", "plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\",", "in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def", "told what the alternatives are. No replacements for the ambiguous substitutions will happen.", "all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger", "find a channel that matches the given match :param match: the match to", "chat and configured discord channels. There are two basic types of relay in", "variation of discord.py's :class:`DefaultHelpCommand`. \"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides", "discord connected to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether to", "Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin to", ":param ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids", "password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated. \" \"You", "displayed nicely in the Quake Live console. :param channel: the channel, the message", "to update the topic on :param topic: the topic to set on the", "return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\"", "and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name}", "configured), and sends a message to all relay channels. \"\"\" game = self.game", "the trigger on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having", "server. This function will forward and messages on the Quake Live server to", "from message.clean_content to avoid ids of mentioned users and channels on the discord", "self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True,", "Allow up to 3 attempts for the user's discord id to authenticate. 
if", "match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if", "message that is happening is forwarded to the other system, and some basic", "plugin to your own one, except for the version command related code. The", "asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False,", "player to kick, etc. :param passed: boolean indicating whether the vote passed \"\"\"", "(default: \"!\") Command prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\")", "\"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the", "intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the", "server logfile, and sets the bot to playing Quake Live on discord. \"\"\"", "future to log those problems to the minqlx.logger \"\"\" pass def _topic_updater(self): try:", "loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message to the configured relay_channels :param", "bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will be", "by #channel-hint with a real mention :param message: the message to replace the", "that passed or failed, i.e. map change, kick player, etc. 
:param args: any", "checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send", "discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and #channel) for messages sent", "message to the discord relay channels. :param caller: the player that initiated the", "self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured", "prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for", ":return: the current text representation of the game state \"\"\" if game.state ==", "is changed. The method sends a corresponding message to the discord relay channels.", "whether the bot will respond to !version or responses are completely switched off", "the discord relay channels. :param votes: the final votes :param vote: the initial", "on how to set up a bot for you discord network take a", "class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to create a relay chat", "same discord connected to. * qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether", "relay channels. :param player: the player that send to the trigger :param msg:", "through, i.e. team chat, general chat, etc. 
\"\"\" if len(msg) > 2 or", "player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to", "set() for item in string_set: if item == '': continue value = int(item)", "self.client = client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self):", "used to communicate to discord, and provides certain commands in the relay and", "content: the content of the message to send to the discord channels \"\"\"", "[\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not", "running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self,", "member_list = [user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in", "the bot in the game console and server logfile, and sets the bot", "the user name and nick :param member_list: the list of members connected to", "\"\"\" # guard clause to avoid None messages from processing. if not message:", "of the vote, i.e. map name, which player to kick, etc. \"\"\" caller_name", "dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot, and init the main discord", "= \"{0} on **{1}** ({2}) with **{3}/{4}** players. 
{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players,", "bot and its interactions on the discord server if discord_client is None: self.discord", "set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on a set of channel_ids on", "guild_typing=False, dm_typing=False) # init the bot, and init the main discord interactions if", "on all the relay and all the triggered channels :param topic: the topic", "triggered channels as well as private authentication to the bot to admin the", "the given limit :return: a discord ready text representation of the player's of", "== \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) ==", "or more than one are found \"\"\" # try a direct channel name", "set on discord channels. :param game: the game to derive the status information", "try a direct match at the user's nickname member = [user for user", "vote passed \"\"\" if passed: content = \"*Vote passed ({} - {}).*\".format(*votes) else:", "configured relay_channels :param msg: the message to send to the relay channel \"\"\"", "int_set(string_set): int_set = set() for item in string_set: if item == '': continue", "\"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the", "e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx):", "priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize", "current game.state that 
may be used in status messages and setting of channel", "author of the original message. :param content: the message itself, ideally taken from", "get the actual cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) #", "@minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel))", "mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the", "trigger on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having the", "relay_chat_message(self, player, channel, message): \"\"\" relay a message to the given channel :param", "not None and user.nick.lower() == match.lower()] if len(member) == 1: return member[0] #", "message: the message to replace the channel mentions in :param player: (default: None)", "when a vote was passed or failed. The method sends a corresponding message", "upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between automatic topic updates", "channel the message came through, i.e. team chat, general chat, etc. \"\"\" #", "all messages from the relay channels back to Quake Live. if message.channel.id in", "and all the triggered channels :param topic: the topic to set on all", "in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars", "return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin", "Set the topic on a set of channel_ids on discord provided. 
:param channel_ids:", "or more than one are found \"\"\" # try a direct match for", "else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return", "to derive the information from :return: the current text representation of the game", "minqlx for interactions with discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client", "used, this player is told what the alternatives are. No replacements for the", "channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content,", "the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch in", "roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the fragstealers_inc discord tech channel", "if game.state == \"warmup\": return \"Warmup\" if game.state == \"countdown\": return \"Match starting\"", "@staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's name for proper formatting to", "the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player)", "help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) #", "dictionary of channel_ids for kept topic suffixes and the related suffixes. 
Make sure", "to replace the channel mentions in :param player: (default: None) when several alternatives", "self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread", "on the discord server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else:", "player that sent the message :param msg: the message that was sent :param", "not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player)", "\"\") Comma separated list of channel ids for full relay. * qlx_discordRelayTeamchatChannelIds (default:", "if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel,", "relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from QL with this", "that should be set. \"\"\" # if we were not provided any channel_ids,", "messages to the discord bot. * qlx_discordAuthCommand (default: \"auth\") command for authenticating a", "in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats", "\"\"\" Formats the top 5 scorers connected to the server in a string.", "a vote was started. The method sends a corresponding message to the discord", "the Bus Station server(s). 
You need to install discord.py in your python installation,", "from authentication for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async", "qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from QL with this text portion.", "the formatted message that may be sent back to Quake Live. \"\"\" sender", "ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] >", "by @user-hint with a real mention :param message: the message to replace the", "discussed on the fragstealers_inc discord tech channel of the Bus Station server(s). You", "self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will be barred from authentication for", "if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on the", "configured and of the qlx_discordReplaceMentions cvars as '0', you can leave it unchecked.", "not include anything to forward, show the usage help text. if len(msg) <", "to a broadcast channel, and specific messages from another channel. 
For a description", "{1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message", "* qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from QL with this text", "user's nickname member = [user for user in member_list if user.nick is not", "asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message to the configured relay_channels", ".format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth attempts, we will bar her/him", "{} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set))", "state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle =", "message should not be relayed to discord \"\"\" for message_filter in self.discord_message_filters: matcher", "Triggers game status information sent towards the originating channel :param ctx: the context", "the triggered channels (when configured), and sends a message to all relay channels.", "message): \"\"\" Function called once a message is send through discord. 
Here the", "given match :param match: the match to look for in the user name", "channels to minqlx :param ctx: the context the trigger happened in :param message:", "original message sent for authentication :param password: the password to authenticate \"\"\" if", "no exception is produced for command errors Might be changed in the future", "\"\"\" Handler for reconnecting the discord bot to discord in case it gets", "with commands and listeners on this pseudo cog class :param discord_bot: the discord_bot", "({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message):", "in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue", "logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for item in", "on this pseudo cog class :param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth,", "portions of the name or portions of the nick, if set member =", "the player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text):", "console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for", "\"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", 
\"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\")", "connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord thread is started. We will", "may need to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep", "Checks whether the message originate in a configured triggered channel :param ctx: the", "message, player=None): \"\"\" replaces a mentioned discord user (indicated by @user-hint with a", "which is used to communicate to discord, and provides certain commands in the", "the current status of the game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from", "channel's topic. position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position !=", "hidden to the given channel :param player: the player that originally sent the", "user mentions in :param player: (default: None) when several alternatives are found for", "\"\"\" Handler called when the game is in countdown, i.e. about to start.", "logger used for logging, usually passed through from the minqlx plugin. \"\"\" super().__init__()", "this. if len(member) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching", "in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\"", "file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler =", "a prefix needs to be used for the messages to be forwarded. These", "that, i.e. 
you did configured and of the qlx_discordReplaceMentions cvars as '0', you", "the final 10 characters from the topic, and search for it in the", "# update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self,", "return channel[0] # then try a case-insensitive direct match with the channel name", "= SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information())", "self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end,", "the provided player's name for proper formatting to discord (i.e. replace '*' (asterisks)", "%(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set =", "the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command),", "changed in the future to log those problems to the minqlx.logger \"\"\" pass", "the bot :param ctx: the context the trigger happened in \"\"\" return isinstance(ctx.message.channel,", "return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top 5 scorers connected to", "minqlx plugin. 
\"\"\" super().__init__() self.version_information = version_information self.logger = logger self.discord = None", "return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids #", "topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic of the provided channel id", "is to create a relay chat between the Quake Live chat and configured", "matches the given match :param match: the match to look for in the", "to the given limit :return: a discord ready text representation of the player's", "sent on a private chat to the bot :param ctx: the context the", "channels. :param player: the player that send to the trigger :param msg: the", "msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat messages on the server.", "a sorted output of the team's player by their score :param player_list: the", "Provides the ending_note for the help output. \"\"\" command_name = self.context.invoked_with return \"Type", "= 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will be barred", "\"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set()", "in \"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players", "configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set", "there are not triggered relay channels configured, do nothing. if not channel_ids or", "the mentions used, this player is told what the alternatives are. 
No replacements", "original message replaced by properly formatted channel mentions \"\"\" if not self.is_discord_logged_in(): return", "channel ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message", "method sends a corresponding message to the discord relay channels, and updates the", "self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content)", "of the original message sent for authentication :param password: the password to authenticate", "get the topic from :return: the topic of the channel \"\"\" channel =", "match at the user's nickname member = [user for user in member_list if", "of channel ids for relaying team chat messages. * qlx_discordTriggeredChannelIds (default: \"\") Comma", "{}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called", "discord channels \"\"\" if not self.is_discord_logged_in(): return # if we were not provided", "in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None):", "user that matches the given match :param match: the match to look for", "self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a", "user.name.lower() == match.lower()] if len(member) == 1: return member[0] # then try a", "need to enable the Server Members Intent for the bot in order to", "off * qlx_discordEnableVersion (default: \"1\") indicates whether the bot will respond to !version", "the original message. 
:param content: the message itself, ideally taken from message.clean_content to", "when the SimpleAsyncDiscord thread is started. We will set up the bot here", "itself, ideally taken from message.clean_content to avoid ids of mentioned users and channels", "is not None and user.nick.lower() == match.lower()] if len(member) == 1: return member[0]", "player that send to the trigger :param msg: the message the player sent", "to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False,", "to !version or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display", "key=lambda k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data = \"\" for", "help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured bot to discord in", "or None if none or more than one are found \"\"\" # try", ":param ctx: the context the trigger happened in \"\"\" try: game = minqlx.Game()", "context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self,", ":param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not", "find_user_that_matches(match, member_list, player=None): \"\"\" find a user that matches the given match :param", "minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path,", "previous_topic is None: previous_topic = topic # preserve the original channel's topic. 
position", "not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True,", "cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the discord", "new event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or", "able to replace discord user mentions. If you don't need that, i.e. you", "returned in that case. :return: the matching member, or None if none or", "the channel name :param channel_list: the list of channels connected to the discord", "bot to use to connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated", "Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes =", "the given match :param match: the match to look for in the channel", "separated list of channel ids where the topic suffix will be kept upon", "The return value may be used for status messages and used in topics", "the vote, i.e. map name, which player to kick, etc. :param passed: boolean", "ctx): \"\"\" Checks whether the message originate in a configured triggered channel :param", "[ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match),", "team_data(player_list, limit=None): \"\"\" generates a sorted output of the team's player by their", "client the discord bot runs in. 
:param version_information: the plugin's version_information string :param", "Handler called when a player connects. The method sends a corresponding message to", "bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\")", "threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages from", "and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) ==", "SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval =", "is authed to the bot :param ctx: the context the trigger happened in", "1: return channel[0] # then try a case-insensitive direct match with the channel", "message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author,", "kick, etc. :param passed: boolean indicating whether the vote passed \"\"\" if passed:", "sent. \"\"\" # guard clause to avoid None messages from processing. 
if not", "has at least three characters, and is either # prefixed by a space", "a space or at the beginning of the string matcher = re.compile(\"(?:^| )@([^", "for the match fail, we try to match portions of the name or", "that no exception is produced for command errors Might be changed in the", "(default: \"^\\!s$, ^\\!p$\") comma separated list of regular expressions for messages that should", "triggered relay channels. :param player: the player that send to the trigger :param", "connected. Mainly displays status update from the bot in the game console and", "return channel[0] # we found more than one matching channel, let's tell the", "channel. For a description on how to set up a bot for you", "ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from", "try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as", "sent :param channel: the chnannel the message was sent to \"\"\" handled_channels =", "of the mydiscordbot, you also need to enable the Server Members Intent for", "host with the same discord connected to. * qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag", "chat, etc. \"\"\" # when the message did not include anything to forward,", "disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for", "and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the fragstealers_inc discord tech", "two basic types of relay in this basic version of a discord plugin:", "authed to the bot :param ctx: the context the trigger happened in \"\"\"", "player is told what the alternatives are. None is returned in that case.", "to replace discord user mentions. 
If you don't need that, i.e. you did", "for authenticating a discord user to the plugin via private message * qlx_discordExecPrefix", "player's of that team by their score \"\"\" if len(player_list) == 0: return", "generates a sorted output of the team's player by their score :param player_list:", "if previous_topic is None: previous_topic = topic # preserve the original channel's topic.", "= 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password.", "2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self,", "message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self,", "for having the bot send the current status of the game server. 
*", "\"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason)))", "relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat message, that might be hidden", "mention :param message: the message to replace the channel mentions in :param player:", "all the channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids", "minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic):", "the relay channels and the triggered channels (when configured), and sends a message", "\"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers", "<https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin to your own one, except", "topic as well as the trigger channels, when configured. 
:param mapname: the new", "to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def", "= Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool)", "of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch", "3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. You", "discord provided. :param channel_ids: the ids of the channels the topic should be", "set up a bot for you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`.", "channel_list, player) if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message", "interactions with discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client", "def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell", "topic that should be set. \"\"\" # if we were not provided any", "players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format", "whether the given message should be filtered and not be sent to discord.", "minqlx.Game): \"\"\" Generate the text for the topic set on discord channels. :param", "the channel, the message came from. 
:param author: the author of the original", ":param author: the author of the original message. :param content: the message itself,", "that sent the message :param msg: the message that was sent :param channel:", "string of the current top5 scorers with the scores and connection time to", "\"\"\" overrides the default command error handler so that no exception is produced", "channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def", "server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue", "none or more than one are found \"\"\" # try a direct match", "player: the player that originally sent the message :param message: the content of", "the original message replaced by properly formatted channel mentions \"\"\" if not self.is_discord_logged_in():", "and some basic Quake Live status updates are send to discord * triggered", "i.e. map name, which player to kick, etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if", ":param game: the game to derive the status information from :return: the topic", "* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether to update the topic", "avoid blocking of the server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if", "limit :return: a discord ready text representation of the player's of that team", "* qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for kept topic suffixes and", "the text for the topic set on discord channels. 
:param game: the game", "in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion # of the triggered", "len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\")", "and listeners on this pseudo cog class :param discord_bot: the discord_bot to initialize", "in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\" Relays a message from", "discord relay channels, and updates the relay channel topic as well as the", "0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx): \"\"\" overrides the", "the topic suffix intact on the configured channels :param channel_ids: the set of", "message himself, do nothing. if message.author == self.discord.user: return # relay all messages", "# then try a case-insensitive direct match with the channel name channel =", "None: return \"No discord connection set up.\" if self.is_discord_logged_in(): return \"Discord connection up", "and setting of channel topics. :param game: the game object to derive the", "\"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content)", "came through :param message: the content of the message \"\"\" if self.discord_replace_relayed_mentions: message", "channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot send the current", "self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot,", "server. 
:return: the formatted message that may be sent back to Quake Live.", "self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord,", "intents=intents) self.initialize_bot(self.discord) # connect the now configured bot to discord in the event_loop", "return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell function to", "respond to from within minqlx for interactions with discord \"\"\" def __init__(self, client,", "well as the trigger channels, when configured. :param mapname: the new map :param", "def player_data(): \"\"\" Formats the top 5 scorers connected to the server in", "minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was either sent", "* qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the discord library (logs to", "own one, except for the version command related code. The basic ideas for", "the ending_note for the help output. 
\"\"\" command_name = self.context.invoked_with return \"Type {0}{1}", "send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which", "self.version_information = version_information self.logger = logger self.discord = None self.authed_discord_ids = set() self.auth_attempts", ":param msg: the message to check whether it should be filtered :return whether", "\"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent,", "version(self, ctx): \"\"\" Triggers the plugin's version information sent to discord :param ctx:", "if game.map_title else game.map gametype = game.type_short.upper() reply = \"{0} on **{1}** ({2})", "\"Maximum authentication attempts reached. \" \"You will be barred from authentication for {}", "message: the message to send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with))", "except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\"", "content of the message to send to the discord channels \"\"\" if not", "minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin", "self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) 
self.add_hook(\"game_end\",", "sender = author.name if author.nick is not None: sender = author.nick if not", ":param match: the match to look for in the user name and nick", "team output for :param limit: (default: None) just list the top players up", "the discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma", "that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes", "channel id :param channel_id: the id of the channel to get the topic", "relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of regular expressions", "discord (i.e. replace '*' (asterisks) with a variant to not interfere with discord's", "caller else \"The server\" content = \"_{} called a vote: {} {}_\".format(caller_name, vote,", "presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the", "send a triggered message to the configured triggered_channel :param player: the player that", "if self.discord is None: return False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self,", "direct searches for the match fail, we try to match portions of the", "max_players = game.maxclients maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper()", "authenticated. 
\" \"You can now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return", "player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted output of the team's", "Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) == 2 and msg[1] ==", ":param player: the player that originally sent the message :param channel: the channel", "if user.nick is not None and user.nick.lower() == match.lower()] if len(member) == 1:", "relay messages to discord \"\"\" def __init__(self, client, author, discord_channel): self.client = client", "trigger channels, when configured. :param player: the player that connected :param reason: the", "fail, we try to match portions of the name or portions of the", "the given channel :param player: the player that originally sent the message :param", "loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured bot to discord in the", "prefixed by a space or at the beginning of the string matcher =", "def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond", "the topic to set on all the channels \"\"\" if not self.is_discord_logged_in(): return", "to generate the team output for :param limit: (default: None) just list the", "self._topic_updater() async def on_message(self, message): \"\"\" Function called once a message is send", "self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\",", "trigger happened in :param message: the message to send to minqlx \"\"\" prefix_length", "set on the given channels \"\"\" # if there are not 
triggered relay", "RotatingFileHandler import minqlx from minqlx import Plugin import discord from discord import ChannelType,", "indicate reveal more data about the server and its current game. :return: string", "for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the", "Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the", "matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels() if", "corresponding message to the discord relay channels, and updates the relay channel topic", "discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return", "The method sends a corresponding message to the discord relay channels. and updates", "status information sent towards the originating channel :param ctx: the context the trigger", "# initialize the discord bot and its interactions on the discord server if", "[\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str =", "the message :param message: the content of the message \"\"\" if not self.discord_triggered_channel_ids:", "channels. 
and updates the relay channel topic as well as the trigger channels,", "also need to enable the Server Members Intent for the bot in order", "information sent towards the originating channel :param ctx: the context the trigger happened", "trigger happened in \"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players =", "if len(channel) == 1: return channel[0] # then we try a match with", "so that no exception is produced for command errors Might be changed in", "match)) alternatives = \"\" for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives)", "\"You will be barred from authentication for {} seconds.\" .format(bar_delay)) def f(): del", "intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True,", ":param content: the content of the message to send to the discord channels", "stop(self): \"\"\" stops the discord client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"),", "be sent to discord. :param msg: the message to check whether it should", "on the fragstealers_inc discord tech channel of the Bus Station server(s). You need", "if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx):", "self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated. 
\" \"You can now use", "the message originate in a configured triggered channel :param ctx: the context the", "to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function called", "topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to", "minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper", "with the scores and connection time to the server \"\"\" player_data = \"\"", "to kick, etc. :param passed: boolean indicating whether the vote passed \"\"\" if", "be set upon. :param topic: the new topic that should be set. \"\"\"", "int(item) int_set.add(value) return int_set def status(self): if self.discord is None: return \"No discord", "Checks whether an author is currently barred from authentication to the bot :param", ":param channel: the channel, the message came from. :param author: the author of", "== match.lower()] if len(member) == 1: return member[0] # then try a direct", "if game.state == \"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or", "on the given channels and keeps the topic suffix intact on the configured", "self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell function to relay messages to", "A help formatter for the minqlx plugin's bot to provide help information. 
This", "> 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\"", "the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def", "this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the bot will respond to", "that the connection to discord is properly closed when this plugin is unloaded.", "bot to provide help information. This is a customized variation of discord.py's :class:`DefaultHelpCommand`.", "Helper to format the current game.state that may be used in status messages", ":param player_list: the list of players to generate the team output for :param", "channel_ids, content): \"\"\" Send a message to a set of channel_ids on discord", "stops the discord client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(),", "from :return: the current text representation of the game state \"\"\" if game.state", "# maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\")", "**{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top", "discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\")", "version_information: the plugin's version_information string :param logger: the logger used for logging, usually", "a user that matches the given match :param match: the match to 
look", "in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self,", "token of the discord bot to use to connect to discord. * qlx_discordRelayChannelIds", "match :param match: the match to look for in the channel name :param", "set up.\" if self.is_discord_logged_in(): return \"Discord connection up and running.\" return \"Discord client", "plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST)", "self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set the topic on", "self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to a set", "order to be able to replace discord user mentions. If you don't need", "from another channel. For a description on how to set up a bot", "for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self,", "the trigger channels, when configured. :param player: the player that connected :param reason:", "self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{}", "types of relay in this basic version of a discord plugin: * full", "the player that initiated the vote :param vote: the vote itself, i.e. 
map", "anything on the next line, you may need to change the topic_ending logic", "return ctx.send(message) async def version(self, ctx): \"\"\" Triggers the plugin's version information sent", "to indicate reveal more data about the server and its current game. :return:", "Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for the help output. \"\"\" command_name", "the user mentions in :param player: (default: None) when several alternatives are found", "is_private_message(self, ctx): \"\"\" Checks whether a message was sent on a private chat", "the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx,", "match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]:", "the topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) #", "in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game status information", "self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake", "message): \"\"\" relay a message to the given channel :param player: the player", "len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\")", "kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{}", "triggered relay. 
* qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from QL with", "player) if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix != \"\": content =", "self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand):", "return # if the bot sent the message himself, do nothing. if message.author", "\"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content =", "channel = [ch for ch in channel_list if ch.name.lower() == match.lower()] if len(channel)", "message to the bot :param ctx: the context the trigger happened in :param", "is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list,", "discord import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version", "matcher.match(msg): return True return False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\"", "of the server via discord private messages to the discord bot. * qlx_discordAuthCommand", "anything to forward, show the usage help text. if len(msg) < 2: return", ":func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion # of the triggered relay channels'", "about the server and its current game. :return: string of the current top5", "5 scorers connected to the server in a string. 
The return value may", "the original message the player sent (includes the trigger) :param channel: the channel", "relay_channels :param msg: the message to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids,", "where every text message that is happening is forwarded to the other system,", "== match.lower()] if len(channel) == 1: return channel[0] # then we try a", "#channel) for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions", "channel = [ch for ch in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel)", "well as private authentication to the bot to admin the server. \"\"\" def", "the discord bot and its interactions on the discord server if discord_client is", "of channel_ids on discord provided. :param channel_ids: the ids of the channels the", "the matching member, or None if none or more than one are found", "triggered relay channels configured, do nothing. if not channel_ids or len(channel_ids) == 0:", "password: str): \"\"\" Handles the authentication to the bot via private message :param", "maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() # CAUTION: if", "# keep the topic suffix on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix(", "self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status =", "\"\") Comma separated list of channel ids for relaying team chat messages. 
*", "players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks", "sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is", "topic to set on all the channels \"\"\" if not self.is_discord_logged_in(): return if", "up to the given limit :return: a discord ready text representation of the", "topic as well as the trigger channels, when configured. :param player: the player", "sent (includes the trigger) :param channel: the channel the message came through, i.e.", "asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay", "Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's", "self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger()", "and content of a message so that it will be displayed nicely in", "= version_information self.logger = logger self.discord = None self.authed_discord_ids = set() self.auth_attempts =", "the player that sent the message :param msg: the message that was sent", "will happen. 
:return: the original message replaced by properly formatted user mentions \"\"\"", "be used for status messages and used in topics to indicate reveal more", "by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this", "plugin's version_information string :param logger: the logger used for logging, usually passed through", "guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False,", "\"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values from the server self.discord_message_filters", "update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the topics on all the relay", "(default: \"\") Comma separated list of channel ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix", ":param ctx: the context of the original message sent for authentication :param password:", "may be used for status messages and used in topics to indicate reveal", "the server. This function will forward and messages on the Quake Live server", "for item in string_set: if item == '': continue value = int(item) int_set.add(value)", "connection set up.\" if self.is_discord_logged_in(): return \"Discord connection up and running.\" return \"Discord", "a configured triggered channel :param ctx: the context the trigger happened in \"\"\"", "None if none or more than one are found \"\"\" # try a", "in member_list if user.nick is not None and user.nick.lower() == match.lower()] if len(member)", "be prefixed with this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the bot", "server and its current game. 
:return: string of the current top5 scorers with", "information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to the Quake Live server\")) discord_bot.add_listener(self.on_ready)", "for the suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands from", "of the game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake", "Function called once the bot connected. Mainly displays status update from the bot", "to a set of channel_ids on discord provided. :param channel_ids: the ids of", "upon. :param topic: the new topic that should be set. \"\"\" # if", "tell the player about this. if len(member) > 1 and player is not", "authenticated users to execute server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables", "\"warmup\": return \"Warmup\" if game.state == \"countdown\": return \"Match starting\" if game.roundlimit in", "game.red_score < 0 or game.blue_score < 0: return \"Match ended: **{}** - **{}**\".format(game.red_score,", "*qlx_command: str): \"\"\" Handles exec messages from discord via private message to the", "discord \"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True", "for all chat messages on the server. This function will forward and messages", "least three characters, and is either # prefixed by a space or at", "message to the configured relay_channels :param msg: the message to send to the", "the bot in order to be able to replace discord user mentions. If", "help information. This is a customized variation of discord.py's :class:`DefaultHelpCommand`. \"\"\" def __init__(self):", "This function mainly updates the topics of the relay channels and the triggered", "this will be enabled and therefore mandatory. 
Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses:", "self.discord is None: return False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids,", "derive the information from :return: the current text representation of the game state", "match: the match to look for in the user name and nick :param", "next line, you may need to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self,", "commands and listeners on this pseudo cog class :param discord_bot: the discord_bot to", "for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered message from QL", "team chat, general chat, etc. \"\"\" # when the message did not include", "relay channels. :param votes: the final votes :param vote: the initial vote that", "seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command:", "minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self,", "topic_channel_ids = self.discord_relay_channel_ids # directly set the topic on channels with no topic", "not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None):", "if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3)", "top players up to the given limit :return: a 
discord ready text representation", "a team_chat message, that might be hidden to the given channel :param player:", "\"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual", "if there are not triggered relay channels configured, do nothing. if not channel_ids", "\"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\",", "relay channel topic as well as the trigger channels, when configured. :param player:", "tell(self, msg): \"\"\" overwrites the player.tell function to relay messages to discord :param", "server to discord. :param player: the player that sent the message :param msg:", "= ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks", "then try a direct match at the user's nickname member = [user for", "to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client", "chat to the bot :param ctx: the context the trigger happened in \"\"\"", "0 or game.blue_score < 0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if", "\"\"\" Handler called when a vote was passed or failed. 
The method sends", "mentions (@user and #channel) for triggered messages sent towards the triggered channels *", "self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel,", "is either # prefixed by a space or at the beginning of the", "of the Bus Station server(s). You need to install discord.py in your python", "kick, etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content =", "message replaced by properly formatted channel mentions \"\"\" if not self.is_discord_logged_in(): return message", "user.nick is not None and user.nick.lower() == match.lower()] if len(member) == 1: return", "minqlx.RET_USAGE if len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting", "to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \"", "of channel ids where the topic suffix will be kept upon updating. *", "derive the status information from :return: the topic that represents the current game", "loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return False return not self.discord.is_closed() and", "[ch for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches =", "\"\") Prefix any triggered message from QL with this text portion. 
Useful when", "= game.map_title if game.map_title else game.map gametype = game.type_short.upper() # CAUTION: if you", "member_list if user.nick is not None and user.nick.lower() == match.lower()] if len(member) ==", "in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return channel[0] #", "!help or responses are completely switched off * qlx_discordEnableVersion (default: \"1\") indicates whether", "the right commands, and run the discord.py bot in a new event_loop until", "of the message to send to the discord channels \"\"\" if not self.is_discord_logged_in():", "if author.nick is not None: sender = author.nick if not self.discord_show_relay_channel_names and channel.id", "for this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly", "import Plugin import discord from discord import ChannelType, AllowedMentions from discord.ext.commands import Bot,", "sent to discord. :param msg: the message to check whether it should be", "qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default:", "cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin", "topics! return \"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players,", "was unloaded. 
\"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\"", "be escaped for discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text =", "{3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\" Function called once the bot", "self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the topics on all", "the topic should be set upon. :param topic: the new topic that should", "this plugin is unloaded. :param plugin: the plugin that was unloaded. \"\"\" if", "game.state == \"warmup\": return \"Warmup\" if game.state == \"countdown\": return \"Match starting\" if", "if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player,", "of specific messages between discord and Quake Live chat where a prefix needs", "a direct channel name match case-sensitive first channel = [ch for ch in", "team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not", "None: player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives = \"\" for", "of the provided channel id :param channel_id: the id of the channel to", "Live chat and discord, where every text message that is happening is forwarded", "#channel-hint with a real mention :param message: the message to replace the channel", "for in the user name and nick :param member_list: the list of members", "Quake Live console. :param channel: the channel, the message came from. 
:param author:", "found \"\"\" # try a direct match for the whole name first member", "logging.handlers import RotatingFileHandler import minqlx from minqlx import Plugin import discord from discord", "\"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\",", "@staticmethod def get_game_info(game): \"\"\" Helper to format the current game.state that may be", "def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to a set of channel_ids", "might be hidden to the given channel :param player: the player that originally", "channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that", "relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message", "== 1: return channel[0] # then we try a match with portions of", "= \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await", "function to relay messages to discord :param msg: the msg to send to", "bot in a new event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent", "do nothing. if not channel_ids or len(channel_ids) == 0: return # send the", "list of channel ids where the topic suffix will be kept upon updating.", "reason: the reason why the player left \"\"\" if reason in [\"disconnected\", \"timed", "* qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake live will be prefixed", "msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg)", "proper formatting to discord (i.e. 
replace '*' (asterisks) with a variant to not", "<https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of the mydiscordbot, you also need to enable", ":param ctx: the context the trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids", "the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self,", "Quake Live or discord happen. :param message: the message that was sent. \"\"\"", "if len(channel) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord", "code. The basic ideas for this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py>", "Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function", "if len(member) == 1: return list(member)[0] # we found more than one matching", "usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its interactions", "\"\"\" The plugin's main purpose is to create a relay chat between the", "look for in the channel name :param channel_list: the list of channels connected", "in a configured triggered channel :param ctx: the context the trigger happened in", "player that connected :param reason: the reason why the player left \"\"\" if", "single quotes for the suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for all", "need to install discord.py in your python installation, i.e. 
python3 -m pip install", "mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to create a relay chat between", "\"\"\" def __init__(self, client, author, discord_channel): self.client = client self.author = author self.discord_channel", ":param player: the player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod", "self.disconnect_discord() return if len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\")", "if len(channel) == 1: return channel[0] # we found more than one matching", "set upon. :param topic: the new topic that should be set. \"\"\" #", "Checks whether a user is authed to the bot :param ctx: the context", "initial vote that passed or failed, i.e. map change, kick player, etc. :param", "\\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes", ":param content: the message itself, ideally taken from message.clean_content to avoid ids of", "topic suffix on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids,", "the message to replace the channel mentions in :param player: (default: None) when", "displays status update from the bot in the game console and server logfile,", "and the related suffixes. Make sure to use single quotes for the suffixes.", "bot for you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. 
As of version", "Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\")", "time to the server \"\"\" player_data = \"\" teams = Plugin.teams() if len(teams['red'])", "this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used", "message :param message: the content of the message \"\"\" if not self.discord_triggered_channel_ids: return", "when running multiple servers on the same host with the same discord connected", "to 3 attempts for the user's discord id to authenticate. if ctx.message.author.id not", "main purpose is to create a relay chat between the Quake Live chat", "content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's", "def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\"", "= players_by_score[:limit] team_data = \"\" for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name),", "private message to the bot :param ctx: the context the trigger happened in", "except for the version command related code. The basic ideas for this plugin", "has reached maximum auth attempts, we will bar her/him from authentication for 5", "the player.tell function to relay messages to discord :param msg: the msg to", "that case. 
:return: the matching channel, or None if none or more than", "alternatives are. None is returned in that case. :return: the matching channel, or", "whether the bot will respond to !help or responses are completely switched off", "triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of the server", "@staticmethod def int_set(string_set): int_set = set() for item in string_set: if item ==", "for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in member: alternatives += \"@{}", "message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel,", "console. :param channel: the channel, the message came from. :param author: the author", "to Quake Live or discord happen. :param message: the message that was sent.", "== 2 and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if", "game.map_title else game.map gametype = game.type_short.upper() reply = \"{0} on **{1}** ({2}) with", "to the bot :param ctx: the context the trigger happened in :param qlx_command:", "etc. \"\"\" # when the message did not include anything to forward, show", "game console and server logfile, and sets the bot to playing Quake Live", "than one are found \"\"\" # try a direct match for the whole", "content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content = \"**{}**: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), message)", ":param plugin: the plugin that was unloaded. 
\"\"\" if plugin == self.__class__.__name__: self.discord.stop()", "message.author == self.discord.user: return # relay all messages from the relay channels back", "exception is produced for command errors Might be changed in the future to", "= [user for user in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is", "os from logging.handlers import RotatingFileHandler import minqlx from minqlx import Plugin import discord", "== \"warmup\": return \"Warmup\" if game.state == \"countdown\": return \"Match starting\" if game.roundlimit", "trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self,", "the player that connected :param reason: the reason why the player left \"\"\"", "player: the player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def", "the game console and server logfile, and sets the bot to playing Quake", "player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord channel", "execute server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for", "of channel topics. 
:param game: the game object to derive the information from", "if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data", "channels connected to the discord server :param player: (default: None) when several alternatives", "return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player,", "self.discord.user: return # relay all messages from the relay channels back to Quake", "set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect,", "self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\"", "= \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\")", "from the minqlx plugin. 
\"\"\" super().__init__() self.version_information = version_information self.logger = logger self.discord", "= SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix =", "msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord", "5 minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached.", "not provided any channel_ids, do nothing. if not channel_ids or len(channel_ids) == 0:", "topic topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic", "your python installation, i.e. python3 -m pip install -U discord.py \"\"\" import re", "import asyncio import threading import logging import os from logging.handlers import RotatingFileHandler import", "e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message", "keep the right portion # of the triggered relay channels' topics! return \"{0}", "set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix =", "the server in a string. 
The return value may be used for status", "= \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to create a", "the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload)", "\"\"\" relay a message to the configured relay_channels :param msg: the message to", "found more than one matching channel, let's tell the player about this. if", "channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat message,", "of the name or portions of the nick, if set member = [user", "!= -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update", "Make sure to use single quotes for the suffixes. * qlx_discordCommandPrefix (default: \"!\")", "the message came through, i.e. team chat, general chat, etc. \"\"\" if len(msg)", "isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a user is authed to", "passed or failed, i.e. map change, kick player, etc. :param args: any arguments", "direct match at the user's nickname member = [user for user in member_list", "reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate", "can now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up", "and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else:", "channel to get the topic from :return: the topic of the channel \"\"\"", "Quake Live server to discord. 
:param player: the player that sent the message", "the provided channel id :param channel_id: the id of the channel to get", "to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player", "get_channel_topic(self, channel_id): \"\"\" get the topic of the provided channel id :param channel_id:", "[user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda", "or (user.nick is not None and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1:", "to. :param content: the content of the message to send to the discord", "enables extended logging for the discord library (logs to minqlx_discord.log in the homepath)", "* triggered relay of specific messages between discord and Quake Live chat where", "loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message to the configured", "Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format the current", "not message: return # if the bot sent the message himself, do nothing.", "representation of the game state \"\"\" if game.state == \"warmup\": return \"Warmup\" if", "than one matching member, let's tell the player about this. if len(member) >", "not interfere with discord's formattings.) 
:param text: the text that shall be escaped", "the name or portions of the nick, if set member = [user for", "f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages from discord", "topic_suffix = previous_topic[position + len(topic_ending):] if position != -1 else previous_topic if channel_id", "purpose is to create a relay chat between the Quake Live chat and", "bot will respond to !version or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels", "message from the triggered channels to minqlx :param ctx: the context the trigger", "for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(", "to discord. :param msg: the message to check whether it should be filtered", "exec messages from discord via private message to the bot :param ctx: the", "current text representation of the game state \"\"\" if game.state == \"warmup\": return", "sends a corresponding message to the discord relay channels. :param votes: the final", "def on_command_error(self, exception, ctx): \"\"\" overrides the default command error handler so that", "on triggered relay channels. Your bot needs edit_channel permission for these channels. *", "\"\") The token of the discord bot to use to connect to discord.", "and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if", "passed or failed. The method sends a corresponding message to the discord relay", "if user.name.lower().find(match.lower()) != -1 or (user.nick is not None and user.nick.lower().find(match.lower()) != -1)]", "ambiguous substitutions will happen. :return: the original message replaced by properly formatted channel", "#{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name)", "should be sent to. 
:param content: the content of the message to send", "pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self, ctx, message): return ctx.send(message)", "None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord channel (indicated by", "channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if", "to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not", "description. Uses: * qlx_discordBotToken (default: \"\") The token of the discord bot to", "sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue member", "suffix will be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of seconds", "should not be sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\")", "discord library (logs to minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__()", "and its current game. :return: string of the current top5 scorers with the", "\"\"\" Handles the authentication to the bot via private message :param ctx: the", "given message should be filtered and not be sent to discord. :param msg:", "channels (when configured), and sends a message to all relay channels. \"\"\" game", "these channels. 
* qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel ids where", "the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with commands", "\"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to", "match: the match to look for in the channel name :param channel_list: the", "original message the player sent (includes the trigger) :param channel: the channel the", "to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is", "for command errors Might be changed in the future to log those problems", "it unchecked. By default, this will be enabled and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents>", "SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\")", "# take the final 10 characters from the topic, and search for it", ":return: the topic of the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is", "game.maxclients maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() reply =", "ids of the channels the message should be sent to. 
:param content: the", "state \"\"\" if game.state == \"warmup\": return \"Warmup\" if game.state == \"countdown\": return", "relay between Quake Live chat and discord, where every text message that is", "or triggered channel :param ctx: the context the trigger happened in \"\"\" return", "_format_message_to_quake(self, channel, author, content): \"\"\" Format the channel, author, and content of a", "are found for the mentions used, this player is told what the alternatives", "self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not", "started. We will set up the bot here with the right commands, and", "= [user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches,", "return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that matches", "discord, and provides certain commands in the relay and triggered channels as well", "game = minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def", "called when a vote was started. The method sends a corresponding message to", "channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True,", "two modes can be combined, i.e. full relay to a broadcast channel, and", "matcher = re.compile(message_filter) if matcher.match(msg): return True return False def handle_ql_chat(self, player: minqlx.Player,", "the command that was sent by the user \"\"\" @minqlx.next_frame def f(): try:", "python installation, i.e. 
python3 -m pip install -U discord.py \"\"\" import re import", "* full relay between Quake Live chat and discord, where every text message", "= author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix,", "if ch.name.lower() == match.lower()] if len(channel) == 1: return channel[0] # then we", "discord and Quake Live chat where a prefix needs to be used for", "network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of the mydiscordbot,", "replace mentions (@user and #channel) for triggered messages sent towards the triggered channels", "message sent for authentication :param password: the password to authenticate \"\"\" if password", "discord's formattings.) :param text: the text that shall be escaped for discord chat", "return False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates", "= re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user in self.discord.get_all_members()] matches =", "return int_set def status(self): if self.discord is None: return \"No discord connection set", "match fail, we try to match portions of the name or portions of", "provided. :param channel_ids: the ids of the channels the topic should be set", "1: return member[0] # if direct searches for the match fail, we try", "on the Quake Live server to discord. :param player: the player that sent", "to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and #channel) for messages", "called when the game is in countdown, i.e. about to start. This function", "start. 
This function mainly updates the topics of the relay channels and the", "player class to relay messages to discord \"\"\" def __init__(self, client, author, discord_channel):", "message to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids,", "[ch for ch in channel_list if ch.name.lower() == match.lower()] if len(channel) == 1:", "the version command related code. The basic ideas for this plugin came from", "extended logging for the discord library (logs to minqlx_discord.log in the homepath) \"\"\"", "-1)] if len(member) == 1: return list(member)[0] # we found more than one", "a channel that matches the given match :param match: the match to look", "msg): \"\"\" overwrites the channel.reply function to relay messages to discord :param msg:", "the trigger happened in \"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players", "extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG)", ":param msg: the message the player sent (includes the trigger) :param channel: the", "channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match,", "if len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to", "for user in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is not None", "if game is None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players))", "item in string_set: if item == '': continue value = int(item) int_set.add(value) return", 
"the trigger :param msg: the message the player sent (includes the trigger) :param", "channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in():", "method sends a corresponding message to the discord relay channels. and updates the", "def get_ending_note(self): \"\"\" Provides the ending_note for the help output. \"\"\" command_name =", "channel_ids for kept topic suffixes and the related suffixes. Make sure to use", "self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True return False def handle_ql_chat(self, player:", "\"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message =", "send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:]))", "player that send to the trigger :param msg: the original message the player", "game.map_title if game.map_title else game.map gametype = game.type_short.upper() reply = \"{0} on **{1}**", "and triggered channels as well as private authentication to the bot to admin", "self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if", "dummy player class to relay messages to discord \"\"\" def __init__(self, client, author,", "channel_id: the id of the channel to get the topic from :return: the", "discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\"", "discord provided. 
:param channel_ids: the ids of the channels the message should be", "self.client = client self.author = author self.discord_channel = discord_channel def __repr__(self): return \"{}", "is send through discord. Here the main interaction points either back to Quake", "message from QL with this text portion. Useful when running multiple servers on", "!= -1 or (user.nick is not None and user.nick.lower().find(match.lower()) != -1)] if len(member)", "minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \"", "reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply)", "was sent on a private chat to the bot :param ctx: the context", "portion. Useful when running multiple servers on the same host with the same", "in :param qlx_command: the command that was sent by the user \"\"\" @minqlx.next_frame", "discord, where every text message that is happening is forwarded to the other", "try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients", "Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents)", "member_list, player) if member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message", "configured. 
:param player: the player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content)", "discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main", "the logger used for logging, usually passed through from the minqlx plugin. \"\"\"", "channels to update the topic on :param topic: the topic to set on", "via private message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to execute", "reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player)", "that was sent :param channel: the chnannel the message was sent to \"\"\"", "self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its interactions on the", "Handler called when a vote was started. The method sends a corresponding message", "client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author = author self.discord_channel = discord_channel", "auth attempts, we will bar her/him from authentication for 5 minutes (300 seconds)", "the bot, and init the main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix,", "until completed. 
\"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents", "configured relay or triggered channel :param ctx: the context the trigger happened in", "escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\"", "\"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to", "minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game", "to relay messages to discord :param msg: the msg to send to this", "for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel ids", "every text message that is happening is forwarded to the other system, and", "regular expression will make sure that the \"#channel\" has at least three characters,", "mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\"", "discord. 
Here the main interaction points either back to Quake Live or discord", "the actual cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding", "more than one are found \"\"\" # try a direct channel name match", "Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\")", "topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on", "= mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if game.map_title", "in case it gets disconnected. :param player: the player that send to the", "len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None: returned_message", "handle_vote_started(self, caller, vote, args): \"\"\" Handler called when a vote was started. The", "display the channel name of the discord channel for configured relay channels *", "your own one, except for the version command related code. The basic ideas", "\".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord", "minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player,", "to look for in the user name and nick :param member_list: the list", "from the relay channels back to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids: content", "\"\"\" # when the message did not include anything to forward, show the", "== self.discord.user: return # relay all messages from the relay channels back to", "will be prefixed with this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the", "if the bot sent the message himself, do nothing. if message.author == self.discord.user:", "via discord private messages to the discord bot. * qlx_discordAuthCommand (default: \"auth\") command", "self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password: str): \"\"\" Handles the authentication", "users to execute server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended", ":class:`DefaultHelpCommand`. \"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for", "(default: \"1\") indicates whether the bot will respond to !version or responses are", "send to the trigger :param msg: the message the player sent (includes the", "relay a message to the configured relay_channels :param msg: the message to send", "= author self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self,", "discord in case it gets disconnected. :param player: the player that send to", "= previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position != -1 else previous_topic", "self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. 
You have", "self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix != \"\": content", "to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater()", "msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2", "minqlx channel class to respond to from within minqlx for interactions with discord", "bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a", "the id of the channel to get the topic from :return: the topic", "args: any arguments of the vote, i.e. map name, which player to kick,", "not be relayed to discord \"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter)", "int_set def status(self): if self.discord is None: return \"No discord connection set up.\"", "not self.is_discord_logged_in(): return message returned_message = message # this regular expression will make", "current topic topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if", "for ch in channel_list if ch.name == match] if len(channel) == 1: return", "\"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content = \"**{}**: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name),", "current game state. 
\"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients", "= self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop)", "direct match with the channel name channel = [ch for ch in channel_list", "specific messages from another channel. For a description on how to set up", "function mainly updates the topics of the relay channels and the triggered channels", "message.clean_content to avoid ids of mentioned users and channels on the discord server.", "the discord client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop)", "in the current topic topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic =", "happen. :return: the original message replaced by properly formatted user mentions \"\"\" if", "@staticmethod def player_data(): \"\"\" Formats the top 5 scorers connected to the server", "quake live will be prefixed with this prefix * qlx_discordEnableHelp (default: \"1\") indicates", "super().__init__() self.version_information = version_information self.logger = logger self.discord = None self.authed_discord_ids = set()", "channel: the channel, the message came from. :param author: the author of the", "re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels() if ch.type in", "self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content =", "case-insensitive direct match with the channel name channel = [ch for ch in", "relay channels. 
\"\"\" game = self.game if game is None: return topic =", "\"1\") Boolean flag to indicate whether to update the topic with the current", "for interactions with discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client =", "logger self.discord = None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\")", "<https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken (default: \"\") The token of the", "team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks whether", "\"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\")", "dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot, and init", "return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\" Function", "def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat", "\"\"\" if reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str =", "topic with the current game state on triggered relay channels. Your bot needs", "map is changed. 
The method sends a corresponding message to the discord relay", "msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to a set of", "None and user.nick.lower() == match.lower()] if len(member) == 1: return member[0] # if", "game.maxclients maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() # CAUTION:", "topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is", "(default: \"\") Comma separated list of channel ids for full relay. * qlx_discordRelayTeamchatChannelIds", "content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\"", "just list the top players up to the given limit :return: a discord", "general chat, etc. \"\"\" if len(msg) > 2 or (len(msg) == 2 and", "\"\"\" Called when the SimpleAsyncDiscord thread is started. We will set up the", "2 or (len(msg) == 2 and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]):", "the topic of the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is None:", "player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None):", "the game server. 
* qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake live", "DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell function to relay", "if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\"", "the chnannel the message was sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\":", "return minqlx.RET_USAGE if len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\")", "and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return", "final 10 characters from the topic, and search for it in the current", "\"\" for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def", "\".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\" send a triggered message", ":param channel: the chnannel the message was sent to \"\"\" handled_channels = {\"chat\":", "discord * triggered relay of specific messages between discord and Quake Live chat", "topic to set on the given channels \"\"\" # if there are not", "\"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg):", "def status(self): if self.discord is None: return \"No discord connection set up.\" if", "return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\" Relays a", 
"kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content", "1: return member[0] # then try a direct match at the user's nickname", "match case-sensitive first channel = [ch for ch in channel_list if ch.name ==", "\"\"\" channel = self.discord.get_channel(channel_id) if channel is None: return None return channel.topic def", "lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message,", "roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay a message to the", "the configured triggered_channel :param player: the player that originally sent the message :param", "= Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\")", "\"\"\" super().__init__() self.version_information = version_information self.logger = logger self.discord = None self.authed_discord_ids =", "Format the channel, author, and content of a message so that it will", "the match fail, we try to match portions of the name or portions", "message was sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red", "ctx): \"\"\" overrides the default command error handler so that no exception is", "method sends a corresponding message to the discord relay channels. 
:param votes: the", "0: return # send the message in its own thread to avoid blocking", "def version(self, ctx): \"\"\" Triggers the plugin's version information sent to discord :param", "barred from authentication for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start()", "the alternatives are. None is returned in that case. :return: the matching channel,", "sent the message :param message: the content of the message \"\"\" if not", "* qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel ids where the topic", "content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called", "trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether", "== self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated. \" \"You can", "is returned in that case. 
:return: the matching channel, or None if none", "version information sent to discord :param ctx: the context the trigger happened in", "a plugin is unloaded to make sure, that the connection to discord is", "f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was either sent in", "hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\",", "guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot, and", "= re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels() if ch.type", "= self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord", "we found more than one matching channel, let's tell the player about this.", "list of regular expressions for messages that should not be sent from quake", "on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def", "the channel name channel = [ch for ch in channel_list if ch.name.lower().find(match.lower()) !=", "password to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have", "self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler", "self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", 
set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\",", "None def triggered_message(self, player, message): \"\"\" send a triggered message to the configured", "get_game_info(game): \"\"\" Helper to format the current game.state that may be used in", "(default: \"1\") indicates whether the bot will respond to !help or responses are", "async def triggered_chat(self, ctx, *message: str): \"\"\" Relays a message from the triggered", "None: sender = author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0}", "import logging import os from logging.handlers import RotatingFileHandler import minqlx from minqlx import", "else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic", "to replace the user mentions in :param player: (default: None) when several alternatives", "vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\"", "a message to a set of channel_ids on discord provided. :param channel_ids: the", "basic types of relay in this basic version of a discord plugin: *", "when a player disconnects. The method sends a corresponding message to the discord", "any triggered message from QL with this text portion. 
Useful when running multiple", "def game_status_information(game: minqlx.Game): \"\"\" Generate the text for the topic set on discord", "msg: the message the player sent (includes the trigger) :param channel: the channel", "with discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author", "first channel = [ch for ch in channel_list if ch.name == match] if", "for the mentions used, this player is told what the alternatives are. None", ":return: the formatted message that may be sent back to Quake Live. \"\"\"", "{} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler", "* qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of the server via discord", "is started. We will set up the bot here with the right commands,", "\"\"\" Checks whether a message was either sent in a configured relay or", "as '0', you can leave it unchecked. By default, this will be enabled", "int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\")", "in countdown, i.e. about to start. This function mainly updates the topics of", "= None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids =", "is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was either sent in a configured", "to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content) >", "server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version", "the scores and connection time to the server \"\"\" player_data = \"\" teams", "channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel) for triggered messages", "channels and keeps the topic suffix intact on the configured channels :param channel_ids:", "list(member)[0] # we found more than one matching member, let's tell the player", "the topic with the current game state on triggered relay channels. Your bot", "content = \"*Vote passed ({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content)", "len(channel_ids) == 0: return # take the final 10 characters from the topic,", "= minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle =", "* qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger on triggered relay channels.", "the topic in its own thread to avoid blocking of the server for", "\"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting", "channel = self.discord.get_channel(channel_id) if channel is None: return None return channel.topic def stop(self):", "self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set the topic on channels with", "channel topics. 
:param game: the game object to derive the information from :return:", "def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the discord bot", "= discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True,", "the alternatives are. No replacements for the ambiguous substitutions will happen. :return: the", "regular expression will make sure that the \"@user\" has at least three characters,", "threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the topics on", "to the bot :param ctx: the context the trigger happened in \"\"\" return", "{}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler called when a vote", "\"1\") display the channel name of the discord channel for configured relay channels", "to get the topic from :return: the topic of the channel \"\"\" channel", "game object to derive the information from :return: the current text representation of", "their score :param player_list: the list of players to generate the team output", "make sure that the \"@user\" has at least three characters, and is either", "handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat messages", "return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is", "this text portion. Useful when running multiple servers on the same host with", "qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake live will be prefixed with", "was passed or failed. 
The method sends a corresponding message to the discord", "topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on a set of", "previous_topic[position + len(topic_ending):] if position != -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes:", "self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on the", "== 1: return channel[0] # then try a case-insensitive direct match with the", "@property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the", "topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on the", "be used in status messages and setting of channel topics. :param game: the", "for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic =", "that the \"@user\" has at least three characters, and is either # prefixed", "handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when a player disconnects. The method", "whether a message was either sent in a configured relay or triggered channel", "< 0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\":", "be hidden to the given channel :param player: the player that originally sent", "the bot to playing Quake Live on discord. 
\"\"\" self.logger.info(\"Logged in to discord", "given channels and keeps the topic suffix intact on the configured channels :param", "will set up the bot here with the right commands, and run the", "player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member", "and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password: str): \"\"\" Handles the", "used in topics to indicate reveal more data about the server and its", "Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run()", "this regular expression will make sure that the \"@user\" has at least three", "match for the whole name first member = [user for user in member_list", "= SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval", "will happen. :return: the original message replaced by properly formatted channel mentions \"\"\"", "discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author =", "= set() for item in string_set: if item == '': continue value =", "help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] 
to the", "@staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that matches the given", "return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix", "channel :param ctx: the context the trigger happened in \"\"\" try: game =", "in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match:", "from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger on triggered", "when configured. :param player: the player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name))", "if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self,", "for user in member_list if user.name.lower() == match.lower()] if len(member) == 1: return", "self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler of the !discord", "map change, kick player, etc. :param args: any arguments of the vote, i.e.", "self.reply_to_context(ctx, \"You have been successfully authenticated. 
\" \"You can now use {}{} to", "author.name if author.nick is not None: sender = author.nick if not self.discord_show_relay_channel_names and", ")@([^ ]{3,})\") member_list = [user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for", "self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if", "connected to the discord server :param player: (default: None) when several alternatives are", "\"\"\" Checks whether a message was sent on a private chat to the", "whether the vote passed \"\"\" if passed: content = \"*Vote passed ({} -", "\"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) == 2", "a new event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions", "except minqlx.NonexistentGameError: reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0}", "to update the topics on all the relay and all the triggered channels", "is_discord_logged_in(self): if self.discord is None: return False return not self.discord.is_closed() and self.discord.is_ready() def", "topics of the relay channels and the triggered channels (when configured), and sends", "preserve the original channel's topic. position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):]", "qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel ids for full relay. *", "checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] 
to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled:", "matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match", "passed: boolean indicating whether the vote passed \"\"\" if passed: content = \"*Vote", "the trigger :param msg: the original message the player sent (includes the trigger)", "the ids of the channels the topic should be set upon. :param topic:", "player, message): \"\"\" send a triggered message to the configured triggered_channel :param player:", "is None: return None return channel.topic def stop(self): \"\"\" stops the discord client", "discord client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def", "Handler for reconnecting the discord bot to discord in case it gets disconnected.", "ids where the topic suffix will be kept upon updating. * qlx_discordUpdateTopicInterval (default:", "= set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids", "= logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int)", "lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the", "message to replace the user mentions in :param player: (default: None) when several", "are. None is returned in that case. 
:return: the matching channel, or None", "^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\" Function called once", "self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\",", "\"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread", "{} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth attempts, we", "the ids of the channels the message should be sent to. :param content:", "Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status", "DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message,", "self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\"", "to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send", "are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids 
& self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\"", "message came through :param message: the content of the message \"\"\" if self.discord_replace_relayed_mentions:", "of members connected to the discord server :param player: (default: None) when several", "ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password: str):", "**{1}** ({2}) with **{3}/{4}** players. {5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except", "with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on", "messages to discord :param msg: the msg to send to this player \"\"\"", "if message.author == self.discord.user: return # relay all messages from the relay channels", ":param message: the content of the message \"\"\" if not self.discord_triggered_channel_ids: return if", "qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and #channel) for messages sent towards relay", "connected :param reason: the reason why the player left \"\"\" if reason in", "is a customized variation of discord.py's :class:`DefaultHelpCommand`. 
\"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def", "topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg,", "game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top 5 scorers connected", "def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate in a configured triggered", "plugin via private message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to", "bot to discord in case it gets disconnected. :param player: the player that", "you did configured and of the qlx_discordReplaceMentions cvars as '0', you can leave", "with the channel name channel = [ch for ch in channel_list if ch.name.lower()", "Handler function for all chat messages on the server. This function will forward", "= Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) >", "object to derive the information from :return: the current text representation of the", "SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic =", "and user.nick.lower() == match.lower()] if len(member) == 1: return member[0] # if direct", "ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match in progress:", "len(msg) == 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\")", "self.update_topics_on_relay_and_triggered_channels(topic) 
threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the topics", "left \"\"\" if reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str", "qlx_discordEnableVersion (default: \"1\") indicates whether the bot will respond to !version or responses", "\"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s", "scores and connection time to the server \"\"\" player_data = \"\" teams =", "return if len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting", "discord bot and its interactions on the discord server if discord_client is None:", "channels' topics! return \"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype,", "minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title", ":param votes: the final votes :param vote: the initial vote that passed or", "on_command_error(self, exception, ctx): \"\"\" overrides the default command error handler so that no", "sure, that the connection to discord is properly closed when this plugin is", "Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\",", "on :param topic: the topic to set on the given channels \"\"\" #", "message to check whether it 
should be filtered :return whether the message should", "channel the original message came through :param message: the content of the message", "topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get", "self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx): \"\"\" overrides the default command", "== 0: return # set the topic in its own thread to avoid", "use single quotes for the suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for", "if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{}", "discord.py \"\"\" import re import asyncio import threading import logging import os from", "re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message)", "ids for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel", "There are two basic types of relay in this basic version of a", "about to start. This function mainly updates the topics of the relay channels", "continue value = int(item) int_set.add(value) return int_set def status(self): if self.discord is None:", "channel, author, content): \"\"\" Format the channel, author, and content of a message", "channel.reply function to relay messages to discord :param msg: the message to send", "1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. 
You have {} attempts", "= os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt =", "at least three characters, and is either # prefixed by a space or", "# then try a direct match at the user's nickname member = [user", "Quake Live chat and configured discord channels. There are two basic types of", "Handler called when a map is changed. The method sends a corresponding message", "reply(self, msg): \"\"\" overwrites the channel.reply function to relay messages to discord :param", "\"\"\" # if we were not provided any channel_ids, do nothing. if not", "relay channels' topics! return \"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle),", "formatting to discord (i.e. replace '*' (asterisks) with a variant to not interfere", "the team's player by their score :param player_list: the list of players to", "free to modify this plugin to your own one, except for the version", "ideas for this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been", ":param message: the message to send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix,", "gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format the current game.state", "author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def channel(self):", "__init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author = author self.discord_channel =", "ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for", "content): \"\"\" Send a message to a set of channel_ids on 
discord provided.", "None: previous_topic = topic # preserve the original channel's topic. position = previous_topic.find(topic_ending)", "Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is unloaded", "that might be hidden to the given channel :param player: the player that", "description on how to set up a bot for you discord network take", "self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\"", "player that originally sent the message :param message: the content of the message", "the discord bot to discord in case it gets disconnected. :param player: the", "self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply function to relay messages to", "or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False,", "ctx: the context the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self,", "\"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top 5 scorers connected to the", "private messages to the discord bot. 
* qlx_discordAuthCommand (default: \"auth\") command for authenticating", "plugin's version information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async def version(self, ctx):", "the trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0", "Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\")", "guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot, and init the main", ".format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3 attempts for the user's discord", "SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix", "triggered channels :param topic: the topic to set on all the channels \"\"\"", "loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was either", "member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is not None and user.nick.lower().find(match.lower()) !=", "the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda", "# 
:func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion # of the triggered relay", "msg: the message that was sent :param channel: the chnannel the message was", "and keeps the topic suffix intact on the configured channels :param channel_ids: the", ":param mapname: the new map :param factory: the map factory used \"\"\" content", "reconnecting the discord bot to discord in case it gets disconnected. :param player:", "via private message :param ctx: the context of the original message sent for", "message is send through discord. Here the main interaction points either back to", "for the mentions used, this player is told what the alternatives are. No", "from discord to quake live will be prefixed with this prefix * qlx_discordEnableHelp", "'': continue value = int(item) int_set.add(value) return int_set def status(self): if self.discord is", "= game.maxclients maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() #", "channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic # preserve", "logger: the logger used for logging, usually passed through from the minqlx plugin.", "plugin's version information sent to discord :param ctx: the context the trigger happened", "@minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help", "qlx_discordBotToken (default: \"\") The token of the discord bot to use to connect", "of the relay channels and the triggered channels (when configured), and sends a", "self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self, ctx,", "and updates the relay channel topic as well as the trigger channels, when", "the message that was sent. 
\"\"\" # guard clause to avoid None messages", "told what the alternatives are. None is returned in that case. :return: the", "qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of the server via discord private", "the message :param msg: the message that was sent :param channel: the chnannel", "context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async", "channel[0] # then try a case-insensitive direct match with the channel name channel", "= Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) #", "discord relay channels. and updates the relay channel topic as well as the", "return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg))", "in the Quake Live console. :param channel: the channel, the message came from.", "channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay a", "the player's of that team by their score \"\"\" if len(player_list) == 0:", "msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status())", "Mainly displays status update from the bot in the game console and server", "and not be sent to discord. 
:param msg: the message to check whether", "def __init__(self, client, author, discord_channel): super().__init__(\"discord\") self.client = client self.author = author self.discord_channel", "no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply)", "self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async", "formatted user mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message #", "vote, i.e. map name, which player to kick, etc. :param passed: boolean indicating", "discord user mentions. If you don't need that, i.e. you did configured and", "text portion. Useful when running multiple servers on the same host with the", "Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\",", "int_set.add(value) return int_set def status(self): if self.discord is None: return \"No discord connection", "towards the originating channel :param ctx: the context the trigger happened in \"\"\"", "do nothing. 
if not channel_ids or len(channel_ids) == 0: return # set the", "if not self.is_discord_logged_in(): return message returned_message = message # this regular expression will", "server :param player: (default: None) when several alternatives are found for the mentions", "self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\")", "client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self,", "should be set upon. :param topic: the new topic that should be set.", "a bot for you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. 
As of", "the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic", "game state \"\"\" if game.state == \"warmup\": return \"Warmup\" if game.state == \"countdown\":", "def auth(self, ctx, password: str): \"\"\" Handles the authentication to the bot via", "def __init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client the discord bot", "return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message to", "errors Might be changed in the future to log those problems to the", "None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player:", "bot, and init the main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information),", "\"\"\" # try a direct channel name match case-sensitive first channel = [ch", "name channel = [ch for ch in channel_list if ch.name.lower().find(match.lower()) != -1] if", ":return: the matching channel, or None if none or more than one are", "channel of the Bus Station server(s). You need to install discord.py in your", "team chat messages. * qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel ids", "returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a user that matches the", "messages on the Quake Live server to discord. 
:param player: the player that", "qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel) for triggered messages sent towards", "Your bot needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma", "file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt =", "and running.\" return \"Discord client not connected.\" def run(self): \"\"\" Called when the", "def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord user (indicated by @user-hint", "member, or None if none or more than one are found \"\"\" #", "del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec", "the topics on all the relay and all the triggered channels :param topic:", "information. This is a customized variation of discord.py's :class:`DefaultHelpCommand`. \"\"\" def __init__(self): super().__init__(no_category=\"minqlx", "\"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a message was", "commands in the relay and triggered channels as well as private authentication to", "self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) == 2 and", "once a message is send through discord. Here the main interaction points either", "return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic", "what the alternatives are. No replacements for the ambiguous substitutions will happen. 
:return:", "players_by_score = players_by_score[:limit] team_data = \"\" for player in players_by_score: team_data += \"**{}**({})", "in that case. :return: the matching channel, or None if none or more", "prefix for the trigger on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger", "relay messages to discord :param msg: the message to send to this channel", "= RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s]", "if not self.is_discord_logged_in(): return # if we were not provided any channel_ids, do", "for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\",", "the beginning of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch", "= Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop,", "channel mentions in :param player: (default: None) when several alternatives are found for", "DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond to from within minqlx for", "help output. \"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command for more info", "private authentication to the bot to admin the server. \"\"\" def __init__(self, version_information,", "import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version =", "discord bot to discord in case it gets disconnected. 
:param player: the player", "quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and #channel)", "topic of the provided channel id :param channel_id: the id of the channel", "for more info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class", "channel, message): \"\"\" relay a message to the given channel :param player: the", "minqlx plugin's bot to provide help information. This is a customized variation of", "the help output. \"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command for more", "= returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a", "messages. * qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel ids for triggered", "permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel", "If you don't need that, i.e. you did configured and of the qlx_discordReplaceMentions", "relay messages to discord :param msg: the msg to send to this player", "return # if we were not provided any channel_ids, do nothing. if not", "user in member_list if user.nick is not None and user.nick.lower() == match.lower()] if", "the player that originally sent the message :param message: the content of the", "== \"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod", "return # set the topic in its own thread to avoid blocking of", "prefix needs to be used for the messages to be forwarded. 
These two", "__init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for the help output.", "Handles exec messages from discord via private message to the bot :param ctx:", "to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler called when a", "to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content):", "pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...]", "member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\"", "asyncio import threading import logging import os from logging.handlers import RotatingFileHandler import minqlx", "= mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content = \"_{} called a vote:", "kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\"", "\"\"\" SimpleAsyncDiscord client which is used to communicate to discord, and provides certain", "the beginning of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user", "self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated. 
\" \"You can now", "True return False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function", "case. :return: the matching member, or None if none or more than one", "not self.is_discord_logged_in(): return # if we were not provided any channel_ids, do nothing.", "Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar", "that the \"#channel\" has at least three characters, and is either # prefixed", "message # this regular expression will make sure that the \"#channel\" has at", "qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot send the current status of", "content of the message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message =", "content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\"", "Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled =", "\"\"\" Handler of the !discord command. Forwards any messages after !discord to the", "the message to send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply(", "\"\"\" find a user that matches the given match :param match: the match", "* qlx_discordEnableVersion (default: \"1\") indicates whether the bot will respond to !version or", "the right portion # of the triggered relay channels' topics! 
return \"{0} on", "the list of players to generate the team output for :param limit: (default:", "not None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives = \"\"", "\"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx,", "Generate the text for the topic set on discord channels. :param game: the", "RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s:", "seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will", "player=None): \"\"\" find a channel that matches the given match :param match: the", ":param channel_ids: the ids of the channels the topic should be set upon.", "\"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players =", "not connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord thread is started. We", "discord id to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id]", "the message the player sent (includes the trigger) :param channel: the channel the", "fragstealers_inc discord tech channel of the Bus Station server(s). 
You need to install", "send to the discord channels \"\"\" if not self.is_discord_logged_in(): return # if we", "# try a direct match for the whole name first member = [user", "\"\"\" if len(player_list) == 0: return \"\" players_by_score = sorted(player_list, key=lambda k: k.score,", "\"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None: returned_message", "> 2 or (len(msg) == 2 and msg[1] not in [\"status\", \"connect\", \"disconnect\",", "(default: \"\") Comma separated list of channel ids for relaying team chat messages.", "len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data +=", "message came through, i.e. team chat, general chat, etc. \"\"\" if len(msg) >", "scorers connected to the server in a string. The return value may be", "Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\",", "relay chat between the Quake Live chat and configured discord channels. 
There are", "self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text for the topic", "accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the", "at the user's nickname member = [user for user in member_list if user.nick", "trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether", "the server via discord private messages to the discord bot. * qlx_discordAuthCommand (default:", "to admin the server. \"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for the", "list of channel ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any", "and provides certain commands in the relay and triggered channels as well as", "{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no", "MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to create", "= \"\" for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None", "len(channel) == 1: return channel[0] # then we try a match with portions", "alternatives are found for the mentions used, this player is told what the", "the authentication to the bot via private message :param ctx: the context of", "sends a corresponding message to the discord relay channels. and updates the relay", "\"status\") Trigger for having the bot send the current status of the game", "same host with the same discord connected to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean", "sent towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin", "return \"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players)", "matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel =", "matching channel, let's tell the player about this. if len(channel) > 1 and", "the match to look for in the user name and nick :param member_list:", "is returned in that case. :return: the matching member, or None if none", "= \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set))", "== match] if len(channel) == 1: return channel[0] # then try a case-insensitive", "topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic,", "= returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a", "async def on_command_error(self, exception, ctx): \"\"\" overrides the default command error handler so", "try a case-insensitive direct match with the channel name channel = [ch for", "the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids", "from discord via private message to the bot :param ctx: the context the", "team chat, general chat, etc. \"\"\" if len(msg) > 2 or (len(msg) ==", "chat, general chat, etc. 
\"\"\" # when the message did not include anything", "if ch.name == match] if len(channel) == 1: return channel[0] # then try", "to the configured triggered_channel :param player: the player that originally sent the message", "\"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\",", "triggered relay channels. Your bot needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds", "text representation of the game state \"\"\" if game.state == \"warmup\": return \"Warmup\"", "suffix on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic)", "mentioned discord channel (indicated by #channel-hint with a real mention :param message: the", "= logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def", "channel_list, player=None): \"\"\" find a channel that matches the given match :param match:", "= \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler", "connected to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether to update", "in member_list if user.name.lower() == match.lower()] if len(member) == 1: return member[0] #", "Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken (default: \"\") The token of", "all the triggered channels :param topic: the topic to set on all the", "10 characters from the topic, and search for it in the current topic", "= [ch for ch in channel_list if ch.name.lower() == match.lower()] if len(channel) ==", "This is a plugin created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You", "happen. :return: the original message replaced by properly formatted channel mentions \"\"\" if", "barred from authentication to the bot :param ctx: the context the trigger happened", "qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of the discord channel for configured", "\"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\",", "enable the Server Members Intent for the bot in order to be able", "in your python installation, i.e. 
python3 -m pip install -U discord.py \"\"\" import", "self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information())", "author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender,", "msg: the original message the player sent (includes the trigger) :param channel: the", "return # relay all messages from the relay channels back to Quake Live.", "discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel ids for full", "channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if", "send to the trigger :param msg: the original message the player sent (includes", "can be combined, i.e. 
full relay to a broadcast channel, and specific messages", "= Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured bot", "class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to relay messages to discord", "discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger on triggered relay", "generate the team output for :param limit: (default: None) just list the top", "from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) == 2 and msg[1]", "send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\"", "\"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx,", "channel_ids or len(channel_ids) == 0: return # set the topic in its own", "used for status messages and used in topics to indicate reveal more data", "channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self):", "not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None):", "qlx_discordEnableHelp (default: \"1\") indicates whether the bot will respond to !help or responses", "of the message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message,", "Discord...\") 
channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) == 2 and msg[1] ==", "when configured. :param mapname: the new map :param factory: the map factory used", "context the trigger happened in :param message: the message to send to minqlx", "Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def", "nothing. if not channel_ids or len(channel_ids) == 0: return # take the final", "kick player, etc. :param args: any arguments of the vote, i.e. map name,", "self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) == 2 and", "topic, and search for it in the current topic topic_ending = topic[-10:] for", "the original message replaced by properly formatted user mentions \"\"\" if not self.is_discord_logged_in():", "to discord (i.e. replace '*' (asterisks) with a variant to not interfere with", "a vote was passed or failed. The method sends a corresponding message to", "we try to match portions of the name or portions of the nick,", "Constructor for the SimpleAsyncDiscord client the discord bot runs in. :param version_information: the", "[user for user in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is not", "more than one are found \"\"\" # try a direct match for the", "# preserve the original channel's topic. position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position +", "status messages and setting of channel topics. :param game: the game object to", "admin the server. 
\"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord", "= \"\" for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None", "topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep", "self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id]))", "message so that it will be displayed nicely in the Quake Live console.", "use to connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list of", ":param channel_ids: the ids of the channels the message should be sent to.", "\"<PASSWORD>\") passwort for remote admin of the server via discord private messages to", "= discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version:", "(default: \"status\") Trigger for having the bot send the current status of the", "try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start()", "Function called once a message is send through discord. Here the main interaction", "Comma separated list of channel ids for relaying team chat messages. * qlx_discordTriggeredChannelIds", "separated list of channel ids for full relay. 
* qlx_discordRelayTeamchatChannelIds (default: \"\") Comma", "a discord user to the plugin via private message * qlx_discordExecPrefix (default: \"qlx\")", "the map factory used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def", "Useful when running multiple servers on the same host with the same discord", "or failed, i.e. map change, kick player, etc. :param args: any arguments of", "status(self): if self.discord is None: return \"No discord connection set up.\" if self.is_discord_logged_in():", "not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0:", "ctx: the context the trigger happened in :param qlx_command: the command that was", "reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate in a configured", "self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set", "to discord :param ctx: the context the trigger happened in \"\"\" await self.reply_to_context(ctx,", "self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command", "will bar her/him from authentication for 5 minutes (300 seconds) bar_delay = 300", "discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function called once", "discord via private message to the bot :param ctx: the context the trigger", "those problems to the minqlx.logger \"\"\" 
pass def _topic_updater(self): try: game = minqlx.Game()", "topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for kept topic", "key=lambda user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue member =", "class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to communicate to discord, and", "configured triggered channel :param ctx: the context the trigger happened in \"\"\" return", "channel :param player: the player that originally sent the message :param channel: the", "client, author, discord_channel): self.client = client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name))", "bar her/him from authentication for 5 minutes (300 seconds) bar_delay = 300 await", "channel_ids, topic): \"\"\" Set the topic on a set of channel_ids on discord", "attempts reached. \" \"You will be barred from authentication for {} seconds.\" .format(bar_delay))", "be sent to. :param content: the content of the message to send to", "if self.is_discord_logged_in(): return \"Discord connection up and running.\" return \"Discord client not connected.\"", "{\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\":", "Called when the SimpleAsyncDiscord thread is started. 
We will set up the bot", "return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when a", "why the player left \"\"\" if reason in [\"disconnected\", \"timed out\", \"was kicked\",", "Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function called once a message is", "self.discord_relay_channel_ids # directly set the topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids", "is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return False", "channel[0] # then we try a match with portions of the channel name", "to your own one, except for the version command related code. The basic", "messages on the server. This function will forward and messages on the Quake", "self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\",", "sorted output of the team's player by their score :param player_list: the list", "chat, etc. \"\"\" if len(msg) > 2 or (len(msg) == 2 and msg[1]", "\"auth\") command for authenticating a discord user to the plugin via private message", "to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1", "def relay_chat_message(self, player, channel, message): \"\"\" relay a message to the given channel", "to kick, etc. 
\"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content", "the whole name first member = [user for user in member_list if user.name.lower()", "of the channels the message should be sent to. :param content: the content", "self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**:", "topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set the", "users for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in member: alternatives +=", "\"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod", "team's player by their score :param player_list: the list of players to generate", "after !discord to the discord triggered relay channels. :param player: the player that", "player, channel, message): \"\"\" relay a message to the given channel :param player:", "for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message)", "to the other system, and some basic Quake Live status updates are send", "if passed: content = \"*Vote passed ({} - {}).*\".format(*votes) else: content = \"*Vote", "self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a message was sent on", "currently barred from authentication to the bot :param ctx: the context the trigger", "on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the", "channels. 
:param votes: the final votes :param vote: the initial vote that passed", "replacements for the ambiguous substitutions will happen. :return: the original message replaced by", "find a user that matches the given match :param match: the match to", "for the whole name first member = [user for user in member_list if", "keep the topic suffix on the channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids", "len(msg) > 2 or (len(msg) == 2 and msg[1] not in [\"status\", \"connect\",", "triggered channels to minqlx :param ctx: the context the trigger happened in :param", "discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message)", "{} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self,", "r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason):", "caller, vote, args): \"\"\" Handler called when a vote was started. The method", "chat and discord, where every text message that is happening is forwarded to", "discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name,", "let's tell the player about this. if len(member) > 1 and player is", "text message that is happening is forwarded to the other system, and some", "on **{1}** ({2}) with **{3}/{4}** players. 
{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data())", "characters from the topic, and search for it in the current topic topic_ending", "game.map_title if game.map_title else game.map gametype = game.type_short.upper() # CAUTION: if you change", "if you change anything on the next line, you may need to change", "for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True return False", "of the channel to get the topic from :return: the topic of the", ":param message: the content of the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message,", "in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match", "change, kick player, etc. :param args: any arguments of the vote, i.e. map", "member, let's tell the player about this. if len(member) > 1 and player", "bot connected. Mainly displays status update from the bot in the game console", "\"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\")", "* qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of the discord channel for", "\"\"\" Handler called when a map is changed. The method sends a corresponding", "a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. 
As of version 1.5 of the mydiscordbot, you also", "updates the relay channel topic as well as the trigger channels, when configured.", "text: the text that shall be escaped for discord chat channels \"\"\" escaped_text", "msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg)", "to enable the Server Members Intent for the bot in order to be", "return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx plugin's bot", "(default: \"\") The token of the discord bot to use to connect to", "team by their score \"\"\" if len(player_list) == 0: return \"\" players_by_score =", "discord_channel): self.client = client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def", "set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int)", "return \"Discord client not connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord thread", "main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else:", "name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth", "\"\"\" Handler called when a player disconnects. 
The method sends a corresponding message", "hidden=True, pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False,", "**{3}/{4}** players. {5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply =", "int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\",", "player that connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\"", "def on_message(self, message): \"\"\" Function called once a message is send through discord.", "the channel the message came through, i.e. team chat, general chat, etc. \"\"\"", "happened in :param qlx_command: the command that was sent by the user \"\"\"", "called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args,", "<= 0 async def auth(self, ctx, password: str): \"\"\" Handles the authentication to", "authenticate. 
if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if", "\"\"\" Triggers game status information sent towards the originating channel :param ctx: the", "to set up a bot for you discord network take a look `here", "Handler when a plugin is unloaded to make sure, that the connection to", "game to derive the status information from :return: the topic that represents the", "\"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay a", "messages from processing. if not message: return # if the bot sent the", ":param vote: the initial vote that passed or failed, i.e. map change, kick", "def steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self,", "\"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player):", "The basic ideas for this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and", "help text. if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to", "set the topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic)", "event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions", "do nothing. 
if not channel_ids or len(channel_ids) == 0: return # take the", "to the plugin via private message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated", "of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user", "in a string. The return value may be used for status messages and", "self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1,", ":param topic: the new topic that should be set. \"\"\" # if we", "then we try a match with portions of the channel name channel =", "channels. :param game: the game to derive the status information from :return: the", "plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is unloaded to make", "portion # of the triggered relay channels' topics! return \"{0} on **{1}** ({2})", "# when the message did not include anything to forward, show the usage", "text. if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord", "the context the trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self,", "the vote passed \"\"\" if passed: content = \"*Vote passed ({} - {}).*\".format(*votes)", "channel_ids: the set of channels to update the topic on :param topic: the", "return None return channel.topic def stop(self): \"\"\" stops the discord client \"\"\" if", "(default: None) just list the top players up to the given limit :return:", "self.is_discord_logged_in(): return # if we were not provided any channel_ids, do nothing. 
if", "\"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's name for", "channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the topic of the", "command related code. The basic ideas for this plugin came from Gelenkbusfahrer and", "the list of members connected to the discord server :param player: (default: None)", "\\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content", "discord to quake live will be prefixed with this prefix * qlx_discordEnableHelp (default:", "all chat messages on the server. This function will forward and messages on", "self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix =", "class to respond to from within minqlx for interactions with discord \"\"\" def", "backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler", "and connection time to the server \"\"\" player_data = \"\" teams = Plugin.teams()", "discord triggered relay channels. 
:param player: the player that send to the trigger", "information from :return: the current text representation of the game state \"\"\" if", "import minqlx from minqlx import Plugin import discord from discord import ChannelType, AllowedMentions", "None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message", "to format the current game.state that may be used in status messages and", "installation, i.e. python3 -m pip install -U discord.py \"\"\" import re import asyncio", "@minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a player connects. The", "as the trigger channels, when configured. :param player: the player that connected \"\"\"", "= \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname,", "channels back to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if", "progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the", "reverse=True) if limit: players_by_score = players_by_score[:limit] team_data = \"\" for player in players_by_score:", "on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\" get the", "is not None: player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives =", "if not channel_ids or len(channel_ids) == 0: return # set the topic in", "for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user", "the SimpleAsyncDiscord client the discord bot runs in. :param version_information: the plugin's version_information", "# directly set the topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids -", "the message in its own thread to avoid blocking of the server for", "admin of the server via discord private messages to the discord bot. 
*", "self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def channel(self): return", "content)) async def on_command_error(self, exception, ctx): \"\"\" overrides the default command error handler", "is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is currently barred from authentication to", "reason why the player left \"\"\" if reason in [\"disconnected\", \"timed out\", \"was", "alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message,", "self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell function to relay messages", ":return whether the message should not be relayed to discord \"\"\" for message_filter", "teams = Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue'])", "the context the trigger happened in :param message: the message to send to", "automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for kept", "invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) #", "ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel", "is properly closed when this plugin is unloaded. 
:param plugin: the plugin that", "Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password =", "client not connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord thread is started.", "be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between automatic", "= self.discord_relay_channel_ids # directly set the topic on channels with no topic suffix", "channel_list if ch.name.lower() == match.lower()] if len(channel) == 1: return channel[0] # then", "responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name", "server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to quake live will be", "import discord from discord import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand", "@minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when a player disconnects.", "bot needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated", "the message should not be relayed to discord \"\"\" for message_filter in self.discord_message_filters:", "to the discord relay channels. :param votes: the final votes :param vote: the", "*args, **kwargs): \"\"\" Handler called when the game is in countdown, i.e. about", "avoid ids of mentioned users and channels on the discord server. 
:return: the", "suffix intact on the configured channels :param channel_ids: the set of channels to", "channel name channel = [ch for ch in channel_list if ch.name.lower() == match.lower()]", "not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on", "will make sure that the \"#channel\" has at least three characters, and is", ":param player: the player that connected :param reason: the reason why the player", "through :param message: the content of the message \"\"\" if self.discord_replace_relayed_mentions: message =", ":param topic: the topic to set on the given channels \"\"\" # if", "method sends a corresponding message to the discord relay channels. :param caller: the", "discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot", "self.is_discord_logged_in(): return message returned_message = message # this regular expression will make sure", "\"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for the", "from logging.handlers import RotatingFileHandler import minqlx from minqlx import Plugin import discord from", "by the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \"", "ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\" Relays a message", "data about the server and its current game. 
:return: string of the current", "self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\",", "to discord. :param player: the player that sent the message :param msg: the", "[%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt)", "\"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\")", "= Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def", "relay channels. and updates the relay channel topic as well as the trigger", "original channel's topic. 
position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position", "message, that might be hidden to the given channel :param player: the player", "encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\",", "content of the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message =", "Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\",", "None messages from processing. 
if not message: return # if the bot sent", "formatted channel mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message #", "configured triggered_channel :param player: the player that originally sent the message :param message:", "authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully", "discord channels for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in channel: alternatives", "ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\"))", "try a direct match for the whole name first member = [user for", "%(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) #", "Live status updates are send to discord * triggered relay of specific messages", "qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether to update the topic with", "string. The return value may be used for status messages and used in", "and run the discord.py bot in a new event_loop until completed. \"\"\" loop", "interaction points either back to Quake Live or discord happen. :param message: the", "authentication to the bot via private message :param ctx: the context of the", "plugin that was unloaded. \"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game:", "author, and content of a message so that it will be displayed nicely", "combined, i.e. full relay to a broadcast channel, and specific messages from another", "to be used for the messages to be forwarded. 
These two modes can", "boolean indicating whether the vote passed \"\"\" if passed: content = \"*Vote passed", "\"\"\" game = self.game if game is None: return topic = mydiscordbot.game_status_information(game) top5_players", "channel, message): \"\"\" relay a team_chat message, that might be hidden to the", "if none or more than one are found \"\"\" # try a direct", "the plugin's version_information string :param logger: the logger used for logging, usually passed", "a user is authed to the bot :param ctx: the context the trigger", "password: the password to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx,", "initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with commands and listeners on this", "check whether it should be filtered :return whether the message should not be", "= matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match,", "\"Discord client not connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord thread is", "Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled", "= self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix))", "channels. 
:param caller: the player that initiated the vote :param vote: the vote", "Server Members Intent for the bot in order to be able to replace", "channel = [ch for ch in channel_list if ch.name == match] if len(channel)", "the bot to admin the server. \"\"\" def __init__(self, version_information, logger): \"\"\" Constructor", "search for it in the current topic topic_ending = topic[-10:] for channel_id in", "qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel ids for relaying team chat", "- {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs):", "@property def steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def", "> 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:**", "failed. The method sends a corresponding message to the discord relay channels. :param", "the Quake Live chat and configured discord channels. There are two basic types", "for user in member_list if user.nick is not None and user.nick.lower() == match.lower()]", "basic version of a discord plugin: * full relay between Quake Live chat", ":param logger: the logger used for logging, usually passed through from the minqlx", "voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init", "the bot connected. Mainly displays status update from the bot in the game", "i.e. team chat, general chat, etc. \"\"\" # when the message did not", "to check whether it should be filtered :return whether the message should not", "on discord provided. 
:param channel_ids: the ids of the channels the topic should", "ideally taken from message.clean_content to avoid ids of mentioned users and channels on", "def handle_vote_started(self, caller, vote, args): \"\"\" Handler called when a vote was started.", "By default, this will be enabled and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a", "We will set up the bot here with the right commands, and run", "problems to the minqlx.logger \"\"\" pass def _topic_updater(self): try: game = minqlx.Game() except", "self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str))", "as well as the trigger channels, when configured. :param mapname: the new map", "# if we were not provided any channel_ids, do nothing. if not channel_ids", "if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def", "not be sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace", "channel, sender, content) async def on_ready(self): \"\"\" Function called once the bot connected.", "and discord, where every text message that is happening is forwarded to the", "to use to connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list", "a configured relay or triggered channel :param ctx: the context the trigger happened", "Handler called when a vote was passed or failed. The method sends a", "help formatter for the minqlx plugin's bot to provide help information. 
This is", "reached maximum auth attempts, we will bar her/him from authentication for 5 minutes", "message originate in a configured triggered channel :param ctx: the context the trigger", "here with the right commands, and run the discord.py bot in a new", "self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord", "(default: \"[DISCORD]\") messages from discord to quake live will be prefixed with this", "+= \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted", "be sent back to Quake Live. \"\"\" sender = author.name if author.nick is", "vote: the vote itself, i.e. map change, kick player, etc. :param args: any", "the trigger) :param channel: the channel the message came through, i.e. team chat,", "that originally sent the message :param channel: the channel the original message came", "sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self):", "Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin):", "\"\"\" import re import asyncio import threading import logging import os from logging.handlers", "for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self,", "new topic that should be set. \"\"\" # if we were not provided", "string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels()", "separated list of channel ids for triggered relay. 
* qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix", "escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called", "map factory used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self,", "channel_ids on discord provided. :param channel_ids: the ids of the channels the topic", "once the bot connected. Mainly displays status update from the bot in the", "through, i.e. team chat, general chat, etc. \"\"\" # when the message did", "set. \"\"\" # if we were not provided any channel_ids, do nothing. if", "not None and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name),", "it will be displayed nicely in the Quake Live console. :param channel: the", "with a real mention :param message: the message to replace the channel mentions", "the current game state on triggered relay channels. 
Your bot needs edit_channel permission", "the bot will respond to !help or responses are completely switched off *", "channel): \"\"\" Handler for reconnecting the discord bot to discord in case it", "channel mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message # this", "to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord", "Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\",", "in [game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score < 0: return \"Match", "logfile, and sets the bot to playing Quake Live on discord. \"\"\" self.logger.info(\"Logged", "def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a player connects. 
The method", "the now configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot):", "name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self, ctx, message): return", "channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player,", "now configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\"", "regular expressions for messages that should not be sent from quake live to", "the message came through, i.e. team chat, general chat, etc. \"\"\" # when", "= \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay", "self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is", "discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for item in string_set: if item", "did configured and of the qlx_discordReplaceMentions cvars as '0', you can leave it", "are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of", "\"\"\" Handler function for all chat messages on the server. This function will", "provided. 
:param channel_ids: the ids of the channels the message should be sent", "This function will forward and messages on the Quake Live server to discord.", "error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond to from", "player that initiated the vote :param vote: the vote itself, i.e. map change,", "be set. \"\"\" # if we were not provided any channel_ids, do nothing.", "str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled =", "else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler", "status updates are send to discord * triggered relay of specific messages between", "chat between the Quake Live chat and configured discord channels. There are two", "in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password: str): \"\"\"", "sent the message :param channel: the channel the original message came through :param", "\"\" for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def", "are send to discord * triggered relay of specific messages between discord and", "player connects. The method sends a corresponding message to the discord relay channels,", "the player that send to the trigger :param msg: the message the player", "if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated. 
\"", "k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data = \"\" for player in", "ctx, *message: str): \"\"\" Relays a message from the triggered channels to minqlx", "= \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's name", "player's name for proper formatting to discord (i.e. replace '*' (asterisks) with a", "if channel is None: return None return channel.topic def stop(self): \"\"\" stops the", "from processing. if not message: return # if the bot sent the message", "message to send to the discord channels \"\"\" if not self.is_discord_logged_in(): return #", "mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply =", "None) just list the top players up to the given limit :return: a", "cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\")", "channel): \"\"\" Handler of the !discord command. Forwards any messages after !discord to", "the content of the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message", "between the Quake Live chat and configured discord channels. There are two basic", "any channel_ids, do nothing. if not channel_ids or len(channel_ids) == 0: return #", "for the minqlx plugin's bot to provide help information. 
This is a customized", "found \"\"\" # try a direct channel name match case-sensitive first channel =", "Handles the authentication to the bot via private message :param ctx: the context", "message :param ctx: the context of the original message sent for authentication :param", "a corresponding message to the discord relay channels. :param votes: the final votes", "for ch in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return", "handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to blue", "right portion # of the triggered relay channels' topics! return \"{0} on **{1}**", "def __init__(self, client, author, discord_channel): self.client = client self.author = author self.discord_channel =", "Boolean flag to indicate whether to update the topic with the current game", "str): \"\"\" Handles exec messages from discord via private message to the bot", "and player is not None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match))", "(default: 305) Amount of seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {})", "and specific messages from another channel. 
For a description on how to set", "self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\")", "that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic):", "message :param channel: the channel the original message came through :param message: the", "= self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False,", "message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix !=", "\".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format", "({2}) with **{3}/{4}** players. {5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError:", "more than one matching member, let's tell the player about this. if len(member)", "\"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is to create a relay", "[ch for ch in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1:", "limit=None): \"\"\" generates a sorted output of the team's player by their score", "* qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between automatic topic updates * qlx_discordKeptTopicSuffixes", "mentions. If you don't need that, i.e. 
you did configured and of the", "return \"Discord connection up and running.\" return \"Discord client not connected.\" def run(self):", "players up to the given limit :return: a discord ready text representation of", "bot :param ctx: the context the trigger happened in \"\"\" return ctx.message.author.id in", "on_message(self, message): \"\"\" Function called once a message is send through discord. Here", "players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data", "connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's name for proper", "private chat to the bot :param ctx: the context the trigger happened in", "set of channel_ids on discord provided. :param channel_ids: the ids of the channels", "the current game state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players =", "information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async def version(self, ctx): \"\"\" Triggers", "args, passed): \"\"\" Handler called when a vote was passed or failed. The", "ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async", "on a private chat to the bot :param ctx: the context the trigger", ":return: the original message replaced by properly formatted user mentions \"\"\" if not", "the game is in countdown, i.e. about to start. 
This function mainly updates", "with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix", "returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find", "state on triggered relay channels. Your bot needs edit_channel permission for these channels.", "= Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool)", "discord bot runs in. :param version_information: the plugin's version_information string :param logger: the", "return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function", "\".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\"", "replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord channel (indicated by #channel-hint with", "to the minqlx.logger \"\"\" pass def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError:", "the user's nickname member = [user for user in member_list if user.nick is", "new map :param factory: the map factory used \"\"\" content = \"*Changing map", "message that was sent :param channel: the chnannel the message was sent to", "from :return: the topic of the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel", "the same host with the same discord connected to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\")", "of the player's of that team by their score \"\"\" if len(player_list) ==", "self.context.invoked_with return \"Type {0}{1} command for more info on a command.\".format(self.clean_prefix, command_name) async", "\"\"\" Format the channel, author, and content of a message so that it", "\"\"\" Set the topic on a set of channel_ids on discord provided. :param", "self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\"", "that case. :return: the matching member, or None if none or more than", "extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"),", "the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks", "whether a user is authed to the bot :param ctx: the context the", "handler so that no exception is produced for command errors Might be changed", "= \"*Vote passed ({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1)", "\"\"\" Helper to format the current game.state that may be used in status", "of the team's player by their score :param player_list: the list of players", "loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay a message to the given", "player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg):", "sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user", "happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def 
is_barred_from_auth(self, ctx): \"\"\" Checks whether", "permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its interactions on the discord", "to make sure, that the connection to discord is properly closed when this", "- **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match in progress: **{}** -", "team_data = \"\" for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return", "a corresponding message to the discord relay channels. and updates the relay channel", "id to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -=", "representation of the player's of that team by their score \"\"\" if len(player_list)", "when this plugin is unloaded. :param plugin: the plugin that was unloaded. \"\"\"", "message to the configured triggered_channel :param player: the player that originally sent the", "discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the", ":param player: the player that send to the trigger :param msg: the message", "event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with commands and", "qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the discord library (logs to minqlx_discord.log", "def team_data(player_list, limit=None): \"\"\" generates a sorted output of the team's player by", "to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate whether to update the", "\" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__,", "user in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is not None and", "to from within minqlx for interactions with discord \"\"\" def __init__(self, client, author,", "game.blue_score) if game.state == \"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score)", "@minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the game is in", "initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)],", "game.red_score] or game.red_score < 0 or game.blue_score < 0: return \"Match ended: **{}**", "player, channel, message): \"\"\" relay a team_chat message, that might be hidden to", "ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is currently", "to the configured relay_channels :param msg: the message to send to the relay", "was sent :param channel: the chnannel the message was sent to \"\"\" handled_channels", "ch in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return channel[0]", "discord \"\"\" def __init__(self, client, author, discord_channel): self.client = client self.author = author", "whether the message originate in a configured triggered channel :param ctx: the context", "= Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ 
Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password", "from authentication to the bot :param ctx: the context the trigger happened in", "one, except for the version command related code. The basic ideas for this", "= len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content):", "message, player=None): \"\"\" replaces a mentioned discord channel (indicated by #channel-hint with a", "the context of the original message sent for authentication :param password: the password", "\"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote,", "self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and \\", "channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered", "bot via private message :param ctx: the context of the original message sent", "len(member) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord users", "that represents the current game state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players())", "if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}:", "will respond to !version or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default:", "ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin to your own one,", "i.e. team chat, general chat, etc. 
\"\"\" if len(msg) > 2 or (len(msg)", "be changed in the future to log those problems to the minqlx.logger \"\"\"", "at the beginning of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list =", "bot in order to be able to replace discord user mentions. If you", "suffixes and the related suffixes. Make sure to use single quotes for the", "= Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if", "(default: {}) A dictionary of channel_ids for kept topic suffixes and the related", "= self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self,", "player: the player that sent the message :param msg: the message that was", "on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\"))", "ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async", "to the given channel :param player: the player that originally sent the message", "minqlx.logger \"\"\" pass def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic", "triggered_message(self, player, message): \"\"\" send a triggered message to the configured triggered_channel :param", "map name, which player to kick, etc. 
:param passed: boolean indicating whether the", "a real mention :param message: the message to replace the channel mentions in", "# guard clause to avoid None messages from processing. if not message: return", "topic suffixes and the related suffixes. Make sure to use single quotes for", "\"\"\" stops the discord client \"\"\" if self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop)", "vote, args): \"\"\" Handler called when a vote was started. The method sends", "\"You have been successfully authenticated. \" \"You can now use {}{} to execute", "string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user in self.discord.get_all_members()]", "match to look for in the channel name :param channel_list: the list of", "actual cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general", "def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the game is in countdown,", "-1] if len(channel) == 1: return channel[0] # we found more than one", "to indicate whether to update the topic with the current game state on", "or len(channel_ids) == 0: return # take the final 10 characters from the", "the information from :return: the current text representation of the game state \"\"\"", "# Allow up to 3 attempts for the user's discord id to authenticate.", "int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\")", "you also need to enable the Server Members Intent for the bot in", "File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = 
minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int)", ":return: the matching member, or None if none or more than one are", "of the nick, if set member = [user for user in member_list if", "\"\"\" Constructor for the SimpleAsyncDiscord client the discord bot runs in. :param version_information:", "discord bot to use to connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma", "player_data = \"\" teams = Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:**", "def run(self): \"\"\" Called when the SimpleAsyncDiscord thread is started. We will set", "These two modes can be combined, i.e. full relay to a broadcast channel,", "= mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler of", "None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start()", "messages to discord :param msg: the message to send to this channel \"\"\"", "the minqlx plugin. \"\"\" super().__init__() self.version_information = version_information self.logger = logger self.discord =", "avoid None messages from processing. if not message: return # if the bot", "0: return # take the final 10 characters from the topic, and search", "reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name),", "discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\",", "message did not include anything to forward, show the usage help text. 
if", "handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the game is in countdown, i.e.", "match to look for in the user name and nick :param member_list: the", "whether the message should not be relayed to discord \"\"\" for message_filter in", "return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\"", "to the server in a string. The return value may be used for", "from the triggered channels to minqlx :param ctx: the context the trigger happened", "discord plugin: * full relay between Quake Live chat and discord, where every", "to send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author,", "keeps the topic suffix intact on the configured channels :param channel_ids: the set", "to minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize", "is None: return \"No discord connection set up.\" if self.is_discord_logged_in(): return \"Discord connection", "command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel", "return message returned_message = message # this regular expression will make sure that", "message the player sent (includes the trigger) :param channel: the channel the message", "-U discord.py \"\"\" import re import asyncio import threading import logging import os", "the triggered channels to minqlx :param ctx: the context the trigger happened in", "\"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted output", "self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) 
self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started)", "sets the bot to playing Quake Live on discord. \"\"\" self.logger.info(\"Logged in to", "Checks whether the given message should be filtered and not be sent to", "maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler", "the given channels \"\"\" # if there are not triggered relay channels configured,", "[\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player:", "trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async", "update the topic on :param topic: the topic to set on the given", "channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord", ":param game: the game object to derive the information from :return: the current", "channels and the triggered channels (when configured), and sends a message to all", "eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled", "are. None is returned in that case. 
:return: the matching member, or None", "on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot", "expression will make sure that the \"#channel\" has at least three characters, and", ")#([^ ]{3,})\") channel_list = [ch for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text,", "The plugin's main purpose is to create a relay chat between the Quake", "name and nick :param member_list: the list of members connected to the discord", "qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger on triggered relay channels. *", "^\\!p$\") comma separated list of regular expressions for messages that should not be", "def find_user_that_matches(match, member_list, player=None): \"\"\" find a user that matches the given match", "message :param msg: the message that was sent :param channel: the chnannel the", "asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was", "in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await", "chat, general chat, etc. \"\"\" if len(msg) > 2 or (len(msg) == 2", "(default: \"qlx\") command for authenticated users to execute server commands from discord *", "threading import logging import os from logging.handlers import RotatingFileHandler import minqlx from minqlx", "to communicate to discord, and provides certain commands in the relay and triggered", "happened in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def", "reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called when a map is", "of the discord bot to use to connect to discord. 
* qlx_discordRelayChannelIds (default:", "minqlx.AbstractChannel): \"\"\" Handler function for all chat messages on the server. This function", ":param match: the match to look for in the channel name :param channel_list:", "import os from logging.handlers import RotatingFileHandler import minqlx from minqlx import Plugin import", "= self.context.invoked_with return \"Type {0}{1} command for more info on a command.\".format(self.clean_prefix, command_name)", "to minqlx :param ctx: the context the trigger happened in :param message: the", "ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed],", "not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return", "\"\") Comma separated list of channel ids where the topic suffix will be", "self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin):", "self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions =", "You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth", "(asterisks) with a variant to not interfere with discord's formattings.) 
:param text: the", "up.\" if self.is_discord_logged_in(): return \"Discord connection up and running.\" return \"Discord client not", "discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True,", "to playing Quake Live on discord. \"\"\" self.logger.info(\"Logged in to discord as: {}", "topic: the new topic that should be set. \"\"\" # if we were", "and search for it in the current topic topic_ending = topic[-10:] for channel_id", "overwrites the player.tell function to relay messages to discord :param msg: the msg", "discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel],", "is_authed(self, ctx): \"\"\" Checks whether a user is authed to the bot :param", "pip install -U discord.py \"\"\" import re import asyncio import threading import logging", "you may need to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to", "ctx): \"\"\" Checks whether an author is currently barred from authentication to the", "def is_discord_logged_in(self): if self.discord is None: return False return not self.discord.is_closed() and self.discord.is_ready()", "in the user name and nick :param member_list: the list of members connected", "and #channel) for triggered messages sent towards the triggered channels * qlx_discordAdminPassword (default", "this player is told what the alternatives are. 
None is returned in that", "match.lower()] if len(member) == 1: return member[0] # if direct searches for the", "of mentioned users and channels on the discord server. :return: the formatted message", "plugin: * full relay between Quake Live chat and discord, where every text", "== self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text for the", "that may be sent back to Quake Live. \"\"\" sender = author.name if", "'*' (asterisks) with a variant to not interfere with discord's formattings.) :param text:", "command for more info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass", ":return: the original message replaced by properly formatted channel mentions \"\"\" if not", "the default command error handler so that no exception is produced for command", "| self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game status information sent towards", "plugin created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to", "with this text portion. Useful when running multiple servers on the same host", "\"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values", "self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided player's name for proper formatting", "\"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) == 2", "discord is properly closed when this plugin is unloaded. 
:param plugin: the plugin", "= Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat", "to discord in case it gets disconnected. :param player: the player that send", "hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute", "back to Quake Live. \"\"\" sender = author.name if author.nick is not None:", "passwort for remote admin of the server via discord private messages to the", "Escapes the provided player's name for proper formatting to discord (i.e. replace '*'", "if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self,", "\"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content =", "in a configured relay or triggered channel :param ctx: the context the trigger", "topic that represents the current game state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players =", "updating. 
* qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between automatic topic updates *", "of that team by their score \"\"\" if len(player_list) == 0: return \"\"", "to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx", "for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self,", "Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if", "unloaded to make sure, that the connection to discord is properly closed when", "(@user and #channel) for triggered messages sent towards the triggered channels * qlx_discordAdminPassword", "message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to execute server commands", "etc. :param args: any arguments of the vote, i.e. map name, which player", "usually passed through from the minqlx plugin. \"\"\" super().__init__() self.version_information = version_information self.logger", "\"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def", "clause to avoid None messages from processing. if not message: return # if", "users and channels on the discord server. :return: the formatted message that may", "len(channel) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord channels", "with a variant to not interfere with discord's formattings.) :param text: the text", "if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the", "a customized variation of discord.py's :class:`DefaultHelpCommand`. 
\"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self):", "be combined, i.e. full relay to a broadcast channel, and specific messages from", "with the right commands, and run the discord.py bot in a new event_loop", "of the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is None: return None", "returned in that case. :return: the matching channel, or None if none or", "ch in channel_list if ch.name == match] if len(channel) == 1: return channel[0]", "matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user in self.discord.get_all_members()] matches", "messages and used in topics to indicate reveal more data about the server", "loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True,", "for 5 minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts", ":param ctx: the context the trigger happened in :param message: the message to", "= self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set the topic", "from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY =", "Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\")", "discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize =", "trigger :param msg: the message the player sent (includes the trigger) :param channel:", 
"is used to communicate to discord, and provides certain commands in the relay", "\"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler called", "None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return False return", "Helper function to update the topics on all the relay and all the", "or portions of the nick, if set member = [user for user in", "@staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text for the topic set on", "the topic that represents the current game state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players", "be filtered :return whether the message should not be relayed to discord \"\"\"", "the context the trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id]", "from discord import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks", "and channels on the discord server. :return: the formatted message that may be", "minqlx.Player): \"\"\" Handler called when a player connects. The method sends a corresponding", "member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None: returned_message = returned_message.replace(\"@{}\".format(match),", "user in member_list if user.name.lower() == match.lower()] if len(member) == 1: return member[0]", "to look for in the channel name :param channel_list: the list of channels", "install discord.py in your python installation, i.e. python3 -m pip install -U discord.py", "back to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content)", "self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True,", "not None: sender = author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return", "channel name :param channel_list: the list of channels connected to the discord server", "triggered channels (when configured), and sends a message to all relay channels. \"\"\"", "the messages to be forwarded. These two modes can be combined, i.e. full", "discord :param msg: the message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg))", "\"*Vote passed ({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def", "None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\"", "3 attempts for the user's discord id to authenticate. if ctx.message.author.id not in", "# adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST)", "channel that matches the given match :param match: the match to look for", "channel[0] # we found more than one matching channel, let's tell the player", "text for the topic set on discord channels. :param game: the game to", "servers on the same host with the same discord connected to. * qlx_discordUpdateTopicOnTriggeredChannels", "topic on a set of channel_ids on discord provided. 
:param channel_ids: the ids", "maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\")", "relay_message(self, msg): \"\"\" relay a message to the configured relay_channels :param msg: the", "given channels \"\"\" # if there are not triggered relay channels configured, do", "mentions used, this player is told what the alternatives are. None is returned", "returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel", "def escape_text_for_discord(text): \"\"\" Escapes the provided player's name for proper formatting to discord", "mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content = \"_{} called a vote: {}", "game is None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def", "author self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg):", "self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger", "author is currently barred from authentication to the bot :param ctx: the context", "**{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score,", "def triggered_chat(self, ctx, *message: str): \"\"\" Relays a message from the triggered channels", "channels :param topic: the topic to set on all the channels 
\"\"\" if", "in channel_list if ch.name == match] if len(channel) == 1: return channel[0] #", "AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY", "connected to the server in a string. The return value may be used", "the !discord command. Forwards any messages after !discord to the discord triggered relay", "output of the team's player by their score :param player_list: the list of", "Live chat and configured discord channels. There are two basic types of relay", "* qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of regular expressions for messages", "\"\"\" overwrites the player.tell function to relay messages to discord :param msg: the", "trigger happened in :param qlx_command: the command that was sent by the user", "variant to not interfere with discord's formattings.) :param text: the text that shall", "plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose", "topics. :param game: the game object to derive the information from :return: the", "({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args,", "self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0}", "player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content)", "from :return: the topic that represents the current game state. \"\"\" ginfo =", "relay channel topic as well as the trigger channels, when configured. 
:param mapname:", "message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def", "bot to playing Quake Live on discord. \"\"\" self.logger.info(\"Logged in to discord as:", "import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class", "({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler", "leave it unchecked. By default, this will be enabled and therefore mandatory. Check", "asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return False return not self.discord.is_closed()", "to not interfere with discord's formattings.) :param text: the text that shall be", "self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages", "are found \"\"\" # try a direct channel name match case-sensitive first channel", "the usage help text. 
if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:])))", "replaced by properly formatted user mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message", "\"\"\" relay a team_chat message, that might be hidden to the given channel", "\"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\")", "= SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention)", "happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str):", "a mentioned discord user (indicated by @user-hint with a real mention :param message:", "from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the discord library", ":param channel: the channel the message came through, i.e. team chat, general chat,", "self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids", "the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion #", "forward, show the usage help text. 
if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player,", "connection up and running.\" return \"Discord client not connected.\" def run(self): \"\"\" Called", "self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\",", "def on_ready(self): \"\"\" Function called once the bot connected. Mainly displays status update", "top 5 scorers connected to the server in a string. The return value", "is unloaded. :param plugin: the plugin that was unloaded. \"\"\" if plugin ==", "self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx,", "to install discord.py in your python installation, i.e. python3 -m pip install -U", "\"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client the discord", "any messages after !discord to the discord triggered relay channels. :param player: the", "a real mention :param message: the message to replace the user mentions in", "version information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async def version(self, ctx): \"\"\"", "# prefixed by a space or at the beginning of the string matcher", "return True return False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler", "\"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod", "up a bot for you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. 
As", "user mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message # this", "than one are found \"\"\" # try a direct channel name match case-sensitive", "None and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message)", "to the discord server :param player: (default: None) when several alternatives are found", "from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the fragstealers_inc", ":param player: the player that originally sent the message :param message: the content", "Members Intent for the bot in order to be able to replace discord", "CAUTION: if you change anything on the next line, you may need to", "towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel) for", "= \"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str)", "factory: the map factory used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content)", "len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list,", "message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord user", "triggered_channel :param player: the player that originally sent the message :param message: the", "channels for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in channel: 
alternatives +=", "the triggered relay channels' topics! return \"{0} on **{1}** ({2}) with **{3}/{4}** players.", "1.5 of the mydiscordbot, you also need to enable the Server Members Intent", "created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify", "ctx: the context the trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts and", "0 async def auth(self, ctx, password: str): \"\"\" Handles the authentication to the", "= self.discord.get_channel(channel_id) if channel is None: return None return channel.topic def stop(self): \"\"\"", "error handler so that no exception is produced for command errors Might be", "\"\"\" overwrites the channel.reply function to relay messages to discord :param msg: the", "now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to", "one matching channel, let's tell the player about this. if len(channel) > 1", "+= \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\" send a", "== 1: return list(member)[0] # we found more than one matching member, let's", "content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord user (indicated by", "plugin. 
\"\"\" super().__init__() self.version_information = version_information self.logger = logger self.discord = None self.authed_discord_ids", "channels \"\"\" if not self.is_discord_logged_in(): return # if we were not provided any", "message returned_message = message # this regular expression will make sure that the", "server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the", "emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False,", "ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True):", "passed through from the minqlx plugin. \"\"\" super().__init__() self.version_information = version_information self.logger =", "between Quake Live chat and discord, where every text message that is happening", "a corresponding message to the discord relay channels, and updates the relay channel", "Quake Live on discord. \"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id))", "the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display", "one matching member, let's tell the player about this. 
if len(member) > 1", "SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return", "the context the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx):", "= \"\" for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data", "is a plugin created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are", "name, which player to kick, etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else", "-1 or (user.nick is not None and user.nick.lower().find(match.lower()) != -1)] if len(member) ==", "if item == '': continue value = int(item) int_set.add(value) return int_set def status(self):", "the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a", "Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\")", "send the current status of the game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages", "with the current game state on triggered relay channels. 
Your bot needs edit_channel", "if not message: return # if the bot sent the message himself, do", "channel is None: return None return channel.topic def stop(self): \"\"\" stops the discord", "beginning of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list = [ch for", "\"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\",", "channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)),", "current game. :return: string of the current top5 scorers with the scores and", "Live on discord. \"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected", "corresponding message to the discord relay channels. and updates the relay channel topic", ":param msg: the message to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg)", "a direct match at the user's nickname member = [user for user in", "DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message", "overrides the default command error handler so that no exception is produced for", "function for all chat messages on the server. 
This function will forward and", "given channel :param player: the player that originally sent the message :param channel:", "channel_list = [ch for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]]", "a discord ready text representation of the player's of that team by their", "= self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player,", "unloaded. \"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate", "in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is currently barred", "user is authed to the bot :param ctx: the context the trigger happened", "is None: return False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic):", "the channel name of the discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters", "self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids", "(default: \"auth\") command for authenticating a discord user to the plugin via private", "if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in sorted(matches,", "import re import asyncio import threading import logging import os from logging.handlers import", "def reply_to_context(self, ctx, message): return ctx.send(message) async def version(self, ctx): 
\"\"\" Triggers the", "list of players to generate the team output for :param limit: (default: None)", "log those problems to the minqlx.logger \"\"\" pass def _topic_updater(self): try: game =", "set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids =", "should be filtered and not be sent to discord. :param msg: the message", "ctx): \"\"\" Triggers game status information sent towards the originating channel :param ctx:", "self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix,", "Formats the top 5 scorers connected to the server in a string. The", "plugin is unloaded to make sure, that the connection to discord is properly", "relay to a broadcast channel, and specific messages from another channel. For a", "pass_context=True, help=\"send [message...] to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version,", "look for in the user name and nick :param member_list: the list of", "command_name = self.context.invoked_with return \"Type {0}{1} command for more info on a command.\".format(self.clean_prefix,", "[user for user in member_list if user.name.lower() == match.lower()] if len(member) == 1:", "were not provided any channel_ids, do nothing. if not channel_ids or len(channel_ids) ==", "message to replace the channel mentions in :param player: (default: None) when several", "ids of the channels the topic should be set upon. 
:param topic: the", "happening is forwarded to the other system, and some basic Quake Live status", "(c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin to your", "points either back to Quake Live or discord happen. :param message: the message", "kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of seconds between automatic topic", "self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A", "returned_message = message # this regular expression will make sure that the \"#channel\"", "chat where a prefix needs to be used for the messages to be", "messages between discord and Quake Live chat where a prefix needs to be", "for the messages to be forwarded. These two modes can be combined, i.e.", "len(channel) == 1: return channel[0] # we found more than one matching channel,", "given match :param match: the match to look for in the channel name", "self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\")", "item == '': continue value = int(item) int_set.add(value) return int_set def status(self): if", ":param member_list: the list of members connected to the discord server :param player:", "self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with commands and listeners", "author.nick is not None: sender = author.nick if not self.discord_show_relay_channel_names and channel.id in", "and player is not None: player.tell(\"Found ^6{}^7 matching discord channels for 
#{}:\".format(len(channel), match))", "user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self,", "whether a message was sent on a private chat to the bot :param", "the configured channels :param channel_ids: the set of channels to update the topic", "= self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic # preserve the original", "shall be escaped for discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text", "and of the qlx_discordReplaceMentions cvars as '0', you can leave it unchecked. By", "messages from another channel. For a description on how to set up a", "in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self,", "sure that the \"#channel\" has at least three characters, and is either #", "the msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\"", "respond to !help or responses are completely switched off * qlx_discordEnableVersion (default: \"1\")", "sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\":", "failed, i.e. map change, kick player, etc. :param args: any arguments of the", "= \"\" teams = Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red']))", "User has reached maximum auth attempts, we will bar her/him from authentication for", "setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs =", "called once the bot connected. 
Mainly displays status update from the bot in", "messages to discord \"\"\" def __init__(self, client, author, discord_channel): self.client = client self.author", "have been successfully authenticated. \" \"You can now use {}{} to execute commands.\"", "topic): \"\"\" Updates the topic on the given channels and keeps the topic", "if len(channel) == 1: return channel[0] # then try a case-insensitive direct match", "\"0\") # get the actual cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\",", "status information from :return: the topic that represents the current game state. \"\"\"", "self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool)", "player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return", "the topic, and search for it in the current topic topic_ending = topic[-10:]", "some basic Quake Live status updates are send to discord * triggered relay", "ctx: the context the trigger happened in \"\"\" try: game = minqlx.Game() ginfo", "= previous_topic[position + len(topic_ending):] if position != -1 else previous_topic if channel_id in", "as the trigger channels, when configured. 
:param mapname: the new map :param factory:", "the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is None: return None return", "* qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat", "prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author,", "self.discord.get_channel(channel_id) if channel is None: return None return channel.topic def stop(self): \"\"\" stops", "this plugin to your own one, except for the version command related code.", "of the vote, i.e. map name, which player to kick, etc. :param passed:", "this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed", "None return channel.topic def stop(self): \"\"\" stops the discord client \"\"\" if self.discord", "return # take the final 10 characters from the topic, and search for", "self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game status information sent", "channel_ids, topic): \"\"\" Updates the topic on the given channels and keeps the", "forward and messages on the Quake Live server to discord. 
:param player: the", "if len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to", "discord :param ctx: the context the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information))", "^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\" Function called once the", "discord user (indicated by @user-hint with a real mention :param message: the message", "Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\",", "basic Quake Live status updates are send to discord * triggered relay of", "{}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted output of", "that send to the trigger :param msg: the message the player sent (includes", "the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def", "do nothing. if message.author == self.discord.user: return # relay all messages from the", "from QL with this text portion. 
Useful when running multiple servers on the", "team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not in handled_channels: return if self.is_filtered_message(msg):", "game: the game to derive the status information from :return: the topic that", "pass def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game)", "= discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client,", "== 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord()", "Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set(", "def is_filtered_message(self, msg): \"\"\" Checks whether the given message should be filtered and", "should not be relayed to discord \"\"\" for message_filter in self.discord_message_filters: matcher =", "the next line, you may need to change the topic_ending logic in #", "member_list, player=None): \"\"\" find a user that matches the given match :param match:", "of the qlx_discordReplaceMentions cvars as '0', you can leave it unchecked. By default,", "vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called when", "called when a map is changed. The method sends a corresponding message to", "came through, i.e. team chat, general chat, etc. 
\"\"\" # when the message", "is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate in a configured triggered channel", "in the future to log those problems to the minqlx.logger \"\"\" pass def", "return if len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting", "self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx plugin's bot to", "suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands from discord *", "when a vote was started. The method sends a corresponding message to the", "channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None: return", "self.discord_exec_prefix)) return # Allow up to 3 attempts for the user's discord id", "to discord, and provides certain commands in the relay and triggered channels as", "= {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\",", "300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will be barred from", "game.state == \"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\"", "this pseudo cog class :param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command,", "len(member) == 1: return member[0] # if direct searches for the match fail,", "message should be filtered and not be sent to discord. 
:param msg: the", "channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites the player.tell function", "in self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content))", "basic ideas for this plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have", "= Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids =", "the password to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You", "used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote,", "a triggered message to the configured triggered_channel :param player: the player that originally", "ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}:", "in its own thread to avoid blocking of the server for channel_id in", "\"\"\" send a triggered message to the configured triggered_channel :param player: the player", "install -U discord.py \"\"\" import re import asyncio import threading import logging import", "**{1}** ({2}) with **{3}/{4}** players. 
\".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game):", "user name and nick :param member_list: the list of members connected to the", "\"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values from the", "max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply", "if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path =", "client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner()", "formattings.) :param text: the text that shall be escaped for discord chat channels", "from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and", "python3 -m pip install -U discord.py \"\"\" import re import asyncio import threading", "player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat messages on", "\"\"\" Function called once the bot connected. Mainly displays status update from the", "def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on a set of channel_ids", "None) when several alternatives are found for the mentions used, this player is", "Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File", "\"\"\" # if there are not triggered relay channels configured, do nothing. if", "called when a player connects. 
The method sends a corresponding message to the", "topic suffix intact on the configured channels :param channel_ids: the set of channels", "channels \"\"\" # if there are not triggered relay channels configured, do nothing.", "indicate whether to update the topic with the current game state on triggered", "usage help text. if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message", "matcher.findall(returned_message) for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list,", "server. \"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client the", "< 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def", "a string. The return value may be used for status messages and used", "running multiple servers on the same host with the same discord connected to.", "channels. Your bot needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\")", "for authenticated users to execute server commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\")", "updates the topics of the relay channels and the triggered channels (when configured),", "\"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel,", "the channels the topic should be set upon. :param topic: the new topic", "if we were not provided any channel_ids, do nothing. 
if not channel_ids or", "{}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3 attempts", "game.state that may be used in status messages and setting of channel topics.", "return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when", "if caller else \"The server\" content = \"_{} called a vote: {} {}_\".format(caller_name,", "the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\",", "\"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content = \"_{} called", "through from the minqlx plugin. \"\"\" super().__init__() self.version_information = version_information self.logger = logger", "logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set):", "> 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx): \"\"\" overrides", "ctx: the context the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self,", "game: the game object to derive the information from :return: the current text", "to avoid blocking of the server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id)", "@{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name)", "that it will be displayed nicely in the Quake Live console. 
:param channel:", "Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler", "self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text for the topic set", "Intent for the bot in order to be able to replace discord user", "arguments of the vote, i.e. map name, which player to kick, etc. \"\"\"", "minqlx.Player, reason): \"\"\" Handler called when a player disconnects. The method sends a", "the current game.state that may be used in status messages and setting of", "configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of regular", "channel, author, and content of a message so that it will be displayed", "\"No discord connection set up.\" if self.is_discord_logged_in(): return \"Discord connection up and running.\"", "message.author, content)) async def on_command_error(self, exception, ctx): \"\"\" overrides the default command error", "of the current top5 scorers with the scores and connection time to the", "**{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match in progress: **{}**", "chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text", "of version 1.5 of the mydiscordbot, you also need to enable the Server", "bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix", "async def on_message(self, message): \"\"\" Function called once a message is send through", "live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user 
and #channel) for", "pseudo cog class :param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message,", "2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return", "portions of the nick, if set member = [user for user in member_list", "the message should be sent to. :param content: the content of the message", "votes, vote, args, passed): \"\"\" Handler called when a vote was passed or", "trigger happened in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message:", "the triggered channels :param topic: the topic to set on all the channels", "and is either # prefixed by a space or at the beginning of", "the Server Members Intent for the bot in order to be able to", "minqlx dummy player class to relay messages to discord \"\"\" def __init__(self, client,", "and #channel) for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace", "minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\",", "discord server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord =", "\" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not in handled_channels:", "initializes a discord bot with commands and listeners on this pseudo cog class", "on a set of channel_ids on discord provided. 
:param channel_ids: the ids of", "qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord user to the plugin via", "return channel[0] # then we try a match with portions of the channel", "in string_set: if item == '': continue value = int(item) int_set.add(value) return int_set", "match in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member", "messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and", "ctx, message): return ctx.send(message) async def version(self, ctx): \"\"\" Triggers the plugin's version", "be relayed to discord \"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if", "self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def", "on the server. This function will forward and messages on the Quake Live", "relay channels, and updates the relay channel topic as well as the trigger", "so that it will be displayed nicely in the Quake Live console. 
:param", "305) Amount of seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A", "is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list,", "file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize,", "message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None", "return # send the message in its own thread to avoid blocking of", "is happening is forwarded to the other system, and some basic Quake Live", "continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None: returned_message =", "message: the content of the message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions:", "on discord channels. 
:param game: the game to derive the status information from", "is not None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives =", "for remote admin of the server via discord private messages to the discord", "content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def on_ready(self): \"\"\"", "triggered relay of specific messages between discord and Quake Live chat where a", "self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def on_message(self, message): \"\"\" Function called once a message", "replace the user mentions in :param player: (default: None) when several alternatives are", "filtered :return whether the message should not be relayed to discord \"\"\" for", "either back to Quake Live or discord happen. :param message: the message that", "self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\",", "return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender,", "needs to be used for the messages to be forwarded. These two modes", "{}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called when a map", "player: the player that send to the trigger :param msg: the original message", "status update from the bot in the game console and server logfile, and", "Live server to discord. 
:param player: the player that sent the message :param", "as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async def", "os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s)", "handled_channels: return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg))", "{} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx,", "provided player's name for proper formatting to discord (i.e. replace '*' (asterisks) with", "sent by the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel),", "space or at the beginning of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\")", "channel topic as well as the trigger channels, when configured. 
:param player: the", "ctx): \"\"\" Triggers the plugin's version information sent to discord :param ctx: the", "\".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format the", "discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to", "help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands", "position != -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] #", "Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to relay messages to", "the context the trigger happened in \"\"\" try: game = minqlx.Game() ginfo =", "passed ({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self,", "return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted output of the", "which player to kick, etc. :param passed: boolean indicating whether the vote passed", "to the trigger :param msg: the original message the player sent (includes the", "output. 
\"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command for more info on", "\"\"\" a minqlx channel class to respond to from within minqlx for interactions", "message): return ctx.send(message) async def version(self, ctx): \"\"\" Triggers the plugin's version information", "bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the server\"))", "update the topic with the current game state on triggered relay channels. Your", "to relay messages to discord :param msg: the message to send to this", "relay or triggered channel :param ctx: the context the trigger happened in \"\"\"", "text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player,", "previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position != -1 else previous_topic if", "real mention :param message: the message to replace the channel mentions in :param", "modify this plugin to your own one, except for the version command related", "member_list if user.name.lower() == match.lower()] if len(member) == 1: return member[0] # then", "\"Discord connection up and running.\" return \"Discord client not connected.\" def run(self): \"\"\"", "else game.map gametype = game.type_short.upper() # CAUTION: if you change anything on the", "right commands, and run the discord.py bot in a new event_loop until completed.", "{}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat", "channels the topic should be set upon. 
:param topic: the new topic that", "to !help or responses are completely switched off * qlx_discordEnableVersion (default: \"1\") indicates", "topic on :param topic: the topic to set on the given channels \"\"\"", "k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data = \"\" for player", "\"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True return", "default, this will be enabled and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description.", "SimpleAsyncDiscord client the discord bot runs in. :param version_information: the plugin's version_information string", "reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot, and init the", "send to discord * triggered relay of specific messages between discord and Quake", "send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to a set of channel_ids on", "to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def", "discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\"))", "the message to send to the discord channels \"\"\" if not self.is_discord_logged_in(): return", "interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord =", "self.discord_help_enabled = 
Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix =", "the main interaction points either back to Quake Live or discord happen. :param", "discord.py bot in a new event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop)", "topic on the given channels and keeps the topic suffix intact on the", "\"\" teams = Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if", "== match.lower()] if len(member) == 1: return member[0] # if direct searches for", "def int_set(string_set): int_set = set() for item in string_set: if item == '':", "(default: \"1\") replace mentions (@user and #channel) for messages sent towards relay channels", "(when configured), and sends a message to all relay channels. \"\"\" game =", "replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord user (indicated by @user-hint with", "when configured. :param player: the player that connected :param reason: the reason why", "you can leave it unchecked. 
By default, this will be enabled and therefore", "\"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt", "sent towards the originating channel :param ctx: the context the trigger happened in", "= self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message)", "previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic # preserve the", "return member[0] # if direct searches for the match fail, we try to", "in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message):", "return team_data def is_filtered_message(self, msg): \"\"\" Checks whether the given message should be", "channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3)", "the alternatives are. None is returned in that case. 
:return: the matching member,", "to update the topic with the current game state on triggered relay channels.", "this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class", "replaced by properly formatted channel mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message", "= author.name if author.nick is not None: sender = author.nick if not self.discord_show_relay_channel_names", "version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client the discord bot runs in.", "ready text representation of the player's of that team by their score \"\"\"", "discord tech channel of the Bus Station server(s). You need to install discord.py", "**kwargs): \"\"\" Handler called when the game is in countdown, i.e. about to", "in status messages and setting of channel topics. :param game: the game object", "the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of the", "setting of channel topics. :param game: the game object to derive the information", "as the trigger channels, when configured. :param player: the player that connected :param", "searches for the match fail, we try to match portions of the name", "channel, or None if none or more than one are found \"\"\" #", "we try a match with portions of the channel name channel = [ch", "game state on triggered relay channels. Your bot needs edit_channel permission for these", "* qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot send the current status", "message came through, i.e. team chat, general chat, etc. \"\"\" # when the", "def handle_map(self, mapname, factory): \"\"\" Handler called when a map is changed. The", "Quake Live. if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0:", "game is in countdown, i.e. 
about to start. This function mainly updates the", "ending_note for the help output. \"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command", "discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO)", "completely switched off * qlx_discordEnableVersion (default: \"1\") indicates whether the bot will respond", "of players to generate the team output for :param limit: (default: None) just", "the trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\"", "message: the message to replace the user mentions in :param player: (default: None)", "the current topic topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id)", "successfully authenticated. \" \"You can now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix))", "channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to", "\"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match in", "or game.red_score < 0 or game.blue_score < 0: return \"Match ended: **{}** -", "Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to communicate to discord,", "function to relay messages to discord :param msg: the message to send to", "connect the now configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self,", "member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return 
returned_message @staticmethod def find_user_that_matches(match,", "self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on the channels that", "in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a message", "update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id):", "ch in channel_list if ch.name.lower() == match.lower()] if len(channel) == 1: return channel[0]", "maximum auth attempts, we will bar her/him from authentication for 5 minutes (300", "list of channel ids for relaying team chat messages. * qlx_discordTriggeredChannelIds (default: \"\")", "directly set the topic on channels with no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids,", "returned_message = message # this regular expression will make sure that the \"@user\"", "discord channels. There are two basic types of relay in this basic version", "make sure, that the connection to discord is properly closed when this plugin", "content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called", "1: return channel[0] # we found more than one matching channel, let's tell", "bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\",", "get_ending_note(self): \"\"\" Provides the ending_note for the help output. 
\"\"\" command_name = self.context.invoked_with", "relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot send the", "channel, and specific messages from another channel. For a description on how to", "return \"Warmup\" if game.state == \"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score,", "* qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions (@user and #channel) for messages sent towards", "return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return", "team_data def is_filtered_message(self, msg): \"\"\" Checks whether the given message should be filtered", "triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for having the bot send", "on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a", "self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to communicate to", ":param message: the message that was sent. \"\"\" # guard clause to avoid", "the game object to derive the information from :return: the current text representation", "\"\"\" if not self.is_discord_logged_in(): return # if we were not provided any channel_ids,", "unchecked. By default, this will be enabled and therefore mandatory. 
Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for", "a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx", "msg): \"\"\" relay a message to the configured relay_channels :param msg: the message", "vote: the initial vote that passed or failed, i.e. map change, kick player,", "sent back to Quake Live. \"\"\" sender = author.name if author.nick is not", "= \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when", "game_status_information(game: minqlx.Game): \"\"\" Generate the text for the topic set on discord channels.", "up to 3 attempts for the user's discord id to authenticate. if ctx.message.author.id", "the message :param channel: the channel the original message came through :param message:", "= SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set))", "overwrites the channel.reply function to relay messages to discord :param msg: the message", "1: return list(member)[0] # we found more than one matching member, let's tell", "server in a string. The return value may be used for status messages", "the text that shall be escaped for discord chat channels \"\"\" escaped_text =", "users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay a message to", ":return: string of the current top5 scorers with the scores and connection time", "to discord is properly closed when this plugin is unloaded. 
:param plugin: the", "return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <= 0 async def auth(self, ctx, password:", "is not None: sender = author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids:", "not in handled_channels: return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player,", "message was either sent in a configured relay or triggered channel :param ctx:", "for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel ids", "member[0] # if direct searches for the match fail, we try to match", "priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot", "^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\")", "change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion", "game.map gametype = game.type_short.upper() reply = \"{0} on **{1}** ({2}) with **{3}/{4}** players.", "\"\"\" a minqlx dummy player class to relay messages to discord \"\"\" def", "blocking of the server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel", "run the discord.py bot in a new event_loop until completed. 
\"\"\" loop =", "discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a user is authed to the", "previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on", "discord server :param player: (default: None) when several alternatives are found for the", "to derive the status information from :return: the topic that represents the current", "whether to update the topic with the current game state on triggered relay", "list the top players up to the given limit :return: a discord ready", ":param channel: the channel the original message came through :param message: the content", "to avoid ids of mentioned users and channels on the discord server. :return:", "topic in its own thread to avoid blocking of the server for channel_id", "channel_id): \"\"\" get the topic of the provided channel id :param channel_id: the", "if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text", "plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the text for", "the final votes :param vote: the initial vote that passed or failed, i.e.", "plugin): \"\"\" Handler when a plugin is unloaded to make sure, that the", "(default: None) when several alternatives are found for the mentions used, this player", "name of the discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$,", "re.compile(message_filter) if matcher.match(msg): return True return False def handle_ql_chat(self, player: minqlx.Player, msg, channel:", "include anything to forward, show the usage help text. if len(msg) < 2:", "the connection to discord is properly closed when this plugin is unloaded. 
:param", "Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured bot to", "to Quake Live. \"\"\" sender = author.name if author.nick is not None: sender", "relay channels and the triggered channels (when configured), and sends a message to", "default command error handler so that no exception is produced for command errors", "server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\",", "previous_topic = topic # preserve the original channel's topic. position = previous_topic.find(topic_ending) topic_suffix", "(logs to minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe", "in topics to indicate reveal more data about the server and its current", "content of a message so that it will be displayed nicely in the", "bot send the current status of the game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\")", "try a direct channel name match case-sensitive first channel = [ch for ch", "the plugin via private message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated users", "happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a", "This is a customized variation of discord.py's :class:`DefaultHelpCommand`. 
\"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\")", "version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a", "configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes", "logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion # of the", "replace mentions (@user and #channel) for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages", "to the bot via private message :param ctx: the context of the original", ":param channel_ids: the set of channels to update the topic on :param topic:", "self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its", "== 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord()", "cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler of the !discord command. Forwards any", "ctx.message.author, ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop)", "on the given channels \"\"\" # if there are not triggered relay channels", "\"timed out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was", "You need to install discord.py in your python installation, i.e. python3 -m pip", "# relay all messages from the relay channels back to Quake Live. 
if", "messages from discord to quake live will be prefixed with this prefix *", "minqlx :param ctx: the context the trigger happened in :param message: the message", "player=None): \"\"\" replaces a mentioned discord channel (indicated by #channel-hint with a real", "\"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\")", "to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been", "Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game running.\"", "a message to all relay channels. \"\"\" game = self.game if game is", "the message came from. :param author: the author of the original message. 
:param", "(default: \"0\") enables extended logging for the discord library (logs to minqlx_discord.log in", "on all the channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids =", "server \"\"\" player_data = \"\" teams = Plugin.teams() if len(teams['red']) > 0: player_data", "authentication :param password: the password to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id)", "self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def", "a message to the given channel :param player: the player that originally sent", "# this regular expression will make sure that the \"#channel\" has at least", "library (logs to minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() #", "mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if game.map_title else", "to set on the given channels \"\"\" # if there are not triggered", "reply = \"{0} on **{1}** ({2}) with **{3}/{4}** players. 
{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype,", "minqlx import Plugin import discord from discord import ChannelType, AllowedMentions from discord.ext.commands import", "original message replaced by properly formatted user mentions \"\"\" if not self.is_discord_logged_in(): return", "discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status,", "if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3 self.auth_attempts[ctx.message.author.id] -= 1 if self.auth_attempts[ctx.message.author.id]", "value = int(item) int_set.add(value) return int_set def status(self): if self.discord is None: return", "Quake Live status updates are send to discord * triggered relay of specific", "whether it should be filtered :return whether the message should not be relayed", "the vote, i.e. map name, which player to kick, etc. \"\"\" caller_name =", "to the discord channels \"\"\" if not self.is_discord_logged_in(): return # if we were", "pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond to from within", ":param msg: the message that was sent :param channel: the chnannel the message", "the Quake Live server to discord. :param player: the player that sent the", "= message # this regular expression will make sure that the \"@user\" has", "tell the player about this. if len(channel) > 1 and player is not", "discord_bot): \"\"\" initializes a discord bot with commands and listeners on this pseudo", "for discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\")", "reached. 
\" \"You will be barred from authentication for {} seconds.\" .format(bar_delay)) def", "\"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not in", "player: minqlx.Player): \"\"\" Handler called when a player connects. The method sends a", "on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def", "to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right", "match with portions of the channel name channel = [ch for ch in", "`here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of the mydiscordbot, you also need to", "mydiscordbot, you also need to enable the Server Members Intent for the bot", ":param text: the text that shall be escaped for discord chat channels \"\"\"", "set up the bot here with the right commands, and run the discord.py", "\"\"\" Handler when a plugin is unloaded to make sure, that the connection", "player is not None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives", "Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\",", "as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self,", "channel_ids on discord provided. 
:param channel_ids: the ids of the channels the message", "help=\"send [message...] to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\",", "for the discord library (logs to minqlx_discord.log in the homepath) \"\"\" def __init__(self,", "discord. :param msg: the message to check whether it should be filtered :return", "name :param channel_list: the list of channels connected to the discord server :param", "None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\",", "a direct match for the whole name first member = [user for user", "= [user for user in member_list if user.nick is not None and user.nick.lower()", "# then we try a match with portions of the channel name channel", "game state. 
\"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle", "if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat", "from Discord...\") self.disconnect_discord() return if len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting", ":param version_information: the plugin's version_information string :param logger: the logger used for logging,", "a minqlx channel class to respond to from within minqlx for interactions with", "\"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\"", "and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return list(member)[0] # we found", "general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect,", "return # Allow up to 3 attempts for the user's discord id to", "the discord relay channels. and updates the relay channel topic as well as", "of discord.py's :class:`DefaultHelpCommand`. \"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the", "the user's discord id to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] =", "was sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to red team)*\",", "limit: players_by_score = players_by_score[:limit] team_data = \"\" for player in players_by_score: team_data +=", "started. The method sends a corresponding message to the discord relay channels. 
:param", "msg, channel): \"\"\" Handler for reconnecting the discord bot to discord in case", "sent the message himself, do nothing. if message.author == self.discord.user: return # relay", "\"\"\" replaces a mentioned discord user (indicated by @user-hint with a real mention", "the discord library (logs to minqlx_discord.log in the homepath) \"\"\" def __init__(self, discord_client=None):", "happened in \"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players())", "of a discord plugin: * full relay between Quake Live chat and discord,", "maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @", "trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks", "the team output for :param limit: (default: None) just list the top players", "= [ch for ch in channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) ==", "player) if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod", "player_data(): \"\"\" Formats the top 5 scorers connected to the server in a", "score :param player_list: the list of players to generate the team output for", "returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find", "create a relay chat between the Quake Live chat and configured discord channels.", "key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not", "happen. :param message: the message that was sent. 
\"\"\" # guard clause to", "> 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord users for", "happened in :param message: the message to send to minqlx \"\"\" prefix_length =", "of the original message. :param content: the message itself, ideally taken from message.clean_content", "nothing. if not channel_ids or len(channel_ids) == 0: return # set the topic", "\"\"\" Triggers the plugin's version information sent to discord :param ctx: the context", "await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate in", "def _format_message_to_quake(self, channel, author, content): \"\"\" Format the channel, author, and content of", "and have been mainly discussed on the fragstealers_inc discord tech channel of the", "the message did not include anything to forward, show the usage help text.", "= minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s]", "The token of the discord bot to use to connect to discord. *", "own thread to avoid blocking of the server for channel_id in channel_ids: channel", "will make sure that the \"@user\" has at least three characters, and is", "relayed to discord \"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg):", "the channel mentions in :param player: (default: None) when several alternatives are found", "flag to indicate whether to update the topic with the current game state", "between discord and Quake Live chat where a prefix needs to be used", "{}) A dictionary of channel_ids for kept topic suffixes and the related suffixes.", ":param ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids", "channels on the discord server. :return: the formatted message that may be sent", "came through, i.e. 
team chat, general chat, etc. \"\"\" if len(msg) > 2", "modes can be combined, i.e. full relay to a broadcast channel, and specific", "to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return", "from. :param author: the author of the original message. :param content: the message", "discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\")) self._topic_updater() async", "case. :return: the matching channel, or None if none or more than one", "\"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content = \"**{}**: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), message) self.send_to_discord_channels(self.discord_triggered_channel_ids, content)", "server(s). You need to install discord.py in your python installation, i.e. python3 -m", "(default: \"\") Comma separated list of channel ids where the topic suffix will", "between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for", "a relay chat between the Quake Live chat and configured discord channels. There", "def is_authed(self, ctx): \"\"\" Checks whether a user is authed to the bot", "(default \"<PASSWORD>\") passwort for remote admin of the server via discord private messages", "The method sends a corresponding message to the discord relay channels. :param caller:", "the discord bot to use to connect to discord. * qlx_discordRelayChannelIds (default: \"\")", "the top players up to the given limit :return: a discord ready text", "is produced for command errors Might be changed in the future to log", "called when a vote was passed or failed. 
The method sends a corresponding", "maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\",", "if self.discord is None: return \"No discord connection set up.\" if self.is_discord_logged_in(): return", "discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def", "which player to kick, etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The", "corresponding message to the discord relay channels. :param votes: the final votes :param", "channels configured, do nothing. if not channel_ids or len(channel_ids) == 0: return #", "the topic on a set of channel_ids on discord provided. :param channel_ids: the", "whether an author is currently barred from authentication to the bot :param ctx:", "if len(member) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord", "gets disconnected. :param player: the player that send to the trigger :param msg:", "self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property", "context the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\"", "to the bot to admin the server. \"\"\" def __init__(self, version_information, logger): \"\"\"", "DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The", "forwarded. 
These two modes can be combined, i.e. full relay to a broadcast", "the plugin that was unloaded. \"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def", "map :param factory: the map factory used \"\"\" content = \"*Changing map to", "\"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is not None:", "the bot here with the right commands, and run the discord.py bot in", "!discord to the discord triggered relay channels. :param player: the player that send", "to all relay channels. \"\"\" game = self.game if game is None: return", "maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() reply = \"{0}", "- **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top 5", "= game.map_title if game.map_title else game.map gametype = game.type_short.upper() reply = \"{0} on", "message that was sent. \"\"\" # guard clause to avoid None messages from", "2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin to your own", "\"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to a", "**{3}/{4}** players. 
\".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to", "adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\",", "a message from the triggered channels to minqlx :param ctx: the context the", "discord :param msg: the msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg))", "if not channel_ids or len(channel_ids) == 0: return # send the message in", "super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel)", "Triggers the plugin's version information sent to discord :param ctx: the context the", "member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a user that", "2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return", "A dictionary of channel_ids for kept topic suffixes and the related suffixes. 
Make", "homepath) \"\"\" def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\")", "Plugin import discord from discord import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command,", "self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether the message originate in a", "authenticating a discord user to the plugin via private message * qlx_discordExecPrefix (default:", "\"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the provided", "the channels the message should be sent to. :param content: the content of", "in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await self.discord.change_presence(activity=discord.Game(name=\"Quake Live\"))", "the current text representation of the game state \"\"\" if game.state == \"warmup\":", "the player that send to the trigger :param msg: the original message the", "this basic version of a discord plugin: * full relay between Quake Live", "@user-hint with a real mention :param message: the message to replace the user", "running.\" return \"Discord client not connected.\" def run(self): \"\"\" Called when the SimpleAsyncDiscord", "may be used in status messages and setting of channel topics. 
:param game:", "ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f() def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether", "qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to execute server commands from discord", "member = [user for user in member_list if user.name.lower() == match.lower()] if len(member)", "def triggered_message(self, player, message): \"\"\" send a triggered message to the configured triggered_channel", "called when a player disconnects. The method sends a corresponding message to the", "used for the messages to be forwarded. These two modes can be combined,", "we found more than one matching member, let's tell the player about this.", "indicating whether the vote passed \"\"\" if passed: content = \"*Vote passed ({}", "match.lower()] if len(member) == 1: return member[0] # then try a direct match", "status messages and used in topics to indicate reveal more data about the", "channels, when configured. :param player: the player that connected \"\"\" content = \"_{}", "player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def", "Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\")", "team_chat message, that might be hidden to the given channel :param player: the", "\"\") Comma separated list of channel ids for triggered relay. 
* qlx_discordTriggeredChatMessagePrefix (default:", "the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks", "to discord :param msg: the message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id},", "matching discord users for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in member:", "+= \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data", "steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg):", "in the relay and triggered channels as well as private authentication to the", ":param ctx: the context the trigger happened in :param qlx_command: the command that", "relay of specific messages between discord and Quake Live chat where a prefix", "message): \"\"\" relay a team_chat message, that might be hidden to the given", "discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord", "a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed):", "return \"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score =", "Forwards any messages after !discord to the discord triggered relay channels. :param player:", "channels. 
There are two basic types of relay in this basic version of", "the status information from :return: the topic that represents the current game state.", "== '': continue value = int(item) int_set.add(value) return int_set def status(self): if self.discord", "0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue']))", "asyncio.set_event_loop(loop) members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False,", "None is returned in that case. :return: the matching channel, or None if", "to be able to replace discord user mentions. If you don't need that,", "etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content = \"_{}", "player_list: the list of players to generate the team output for :param limit:", "\"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks whether the given", "the channel the original message came through :param message: the content of the", "(default: \"1\") replace mentions (@user and #channel) for triggered messages sent towards the", "another channel. For a description on how to set up a bot for", "Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler)", "and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. 
Uses: * qlx_discordBotToken (default: \"\")", "case-sensitive first channel = [ch for ch in channel_list if ch.name == match]", "None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\"", "game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle", ":param factory: the map factory used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname))", "def is_message_in_relay_or_triggered_channel(self, ctx): \"\"\" Checks whether a message was either sent in a", "the trigger happened in :param qlx_command: the command that was sent by the", "topic set on discord channels. :param game: the game to derive the status", "the initial vote that passed or failed, i.e. map change, kick player, etc.", "current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] 
to the Quake", "num_players, max_players) @staticmethod def get_game_info(game): \"\"\" Helper to format the current game.state that", "player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\" send a triggered message to", "minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format the channel,", "the other system, and some basic Quake Live status updates are send to", "(len(msg) == 2 and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE", "function to update the topics on all the relay and all the triggered", "thread to avoid blocking of the server for channel_id in channel_ids: channel =", "r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when", "for the version command related code. The basic ideas for this plugin came", "= \"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes,", "discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx:", "ctx): \"\"\" Checks whether a message was sent on a private chat to", "messages to be forwarded. These two modes can be combined, i.e. 
full relay", "f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception", "not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids =", "self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic # preserve the original channel's", "by properly formatted channel mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message =", "-= 1 if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. You have {}", "initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\")", "= client self.author = author self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self),", "else: topic_channel_ids = self.discord_relay_channel_ids # directly set the topic on channels with no", "channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\" Send a message to", "message that may be sent back to Quake Live. 
\"\"\" sender = author.name", "name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status,", "and the triggered channels (when configured), and sends a message to all relay", "the originating channel :param ctx: the context the trigger happened in \"\"\" try:", "playing Quake Live on discord. \"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name,", "the qlx_discordReplaceMentions cvars as '0', you can leave it unchecked. By default, this", "are. No replacements for the ambiguous substitutions will happen. :return: the original message", "self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids,", "set member = [user for user in member_list if user.name.lower().find(match.lower()) != -1 or", "list of channel ids for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated", "%(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set", "of channels connected to the discord server :param player: (default: None) when several", "of the channel name channel = [ch for ch in channel_list if ch.name.lower().find(match.lower())", "sender, content) async def on_ready(self): \"\"\" Function called once the bot connected. Mainly", "back to Quake Live or discord happen. 
:param message: the message that was", "not channel_ids or len(channel_ids) == 0: return # set the topic in its", "else game.map gametype = game.type_short.upper() reply = \"{0} on **{1}** ({2}) with **{3}/{4}**", "re import asyncio import threading import logging import os from logging.handlers import RotatingFileHandler", "# send the message in its own thread to avoid blocking of the", "@staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a user that matches the given", "game.type_short.upper() # CAUTION: if you change anything on the next line, you may", "messages that should not be sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages", "self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called when a map is changed.", "or responses are completely switched off * qlx_discordEnableVersion (default: \"1\") indicates whether the", "was started. The method sends a corresponding message to the discord relay channels.", "that may be used in status messages and setting of channel topics. :param", "comma separated list of regular expressions for messages that should not be sent", "if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class", "Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel):", "corresponding message to the discord relay channels. :param caller: the player that initiated", "channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of regular expressions for", "to provide help information. This is a customized variation of discord.py's :class:`DefaultHelpCommand`. 
\"\"\"", "if channel.name not in handled_channels: return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\",", "nothing. if not channel_ids or len(channel_ids) == 0: return # send the message", "in a new event_loop until completed. \"\"\" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) members_intent =", "channel class to respond to from within minqlx for interactions with discord \"\"\"", "Quake Live chat where a prefix needs to be used for the messages", "i.e. full relay to a broadcast channel, and specific messages from another channel.", "False return not self.discord.is_closed() and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the", "asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a message to the", "sure that the \"@user\" has at least three characters, and is either #", "\"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\")", "for kept topic suffixes and the related suffixes. 
Make sure to use single", "context the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\"", "len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None):", "self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game status information sent towards the", "needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list", "formatted message that may be sent back to Quake Live. \"\"\" sender =", "name for proper formatting to discord (i.e. replace '*' (asterisks) with a variant", "and configured discord channels. There are two basic types of relay in this", "\" \"You can now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return #", "output for :param limit: (default: None) just list the top players up to", "should be filtered :return whether the message should not be relayed to discord", "a description on how to set up a bot for you discord network", "gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game running.\" if", "reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else:", "seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids", "channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None:", "\"\"\" This is a plugin created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests>", "a message was either sent in a configured relay or triggered channel :param", 
"returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a user", "switched off * qlx_discordEnableVersion (default: \"1\") indicates whether the bot will respond to", "== \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) ==", "SimpleAsyncDiscord client which is used to communicate to discord, and provides certain commands", "the discord bot. * qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord user", "\" *(to specs)*\"} if channel.name not in handled_channels: return if self.is_filtered_message(msg): return if", "channel the message came through, i.e. team chat, general chat, etc. \"\"\" if", "return \"No discord connection set up.\" if self.is_discord_logged_in(): return \"Discord connection up and", "Live or discord happen. :param message: the message that was sent. \"\"\" #", "topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic", "on discord. \"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to", "ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message =", "passed \"\"\" if passed: content = \"*Vote passed ({} - {}).*\".format(*votes) else: content", "called once a message is send through discord. 
Here the main interaction points", "# if direct searches for the match fail, we try to match portions", "a match with portions of the channel name channel = [ch for ch", "self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map)", "def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on the given channels and", "[user for user in member_list if user.nick is not None and user.nick.lower() ==", "(default: \"1\") Boolean flag to indicate whether to update the topic with the", "context the trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts and self.auth_attempts[ctx.message.author.id] <=", "nickname member = [user for user in member_list if user.nick is not None", "triggered messages sent towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for", "or game.blue_score < 0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state", "The method sends a corresponding message to the discord relay channels. 
:param votes:", "def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with commands and listeners on", "_topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval,", "context of the original message sent for authentication :param password: the password to", "import threading import logging import os from logging.handlers import RotatingFileHandler import minqlx from", "relay and triggered channels as well as private authentication to the bot to", "self.discord is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay", "content): \"\"\" Format the channel, author, and content of a message so that", "update from the bot in the game console and server logfile, and sets", "be filtered and not be sent to discord. :param msg: the message to", "content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def", "handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a player connects. The method sends", "help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current", "plugin is unloaded. :param plugin: the plugin that was unloaded. \"\"\" if plugin", "the vote itself, i.e. map change, kick player, etc. 
:param args: any arguments", "\"\"\" get the topic of the provided channel id :param channel_id: the id", "channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is None: return None return channel.topic", "for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in channel: alternatives += \"#{}", "mentions used, this player is told what the alternatives are. No replacements for", "Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the fragstealers_inc discord", "self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None,", "discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx),", "if user.name.lower() == match.lower()] if len(member) == 1: return member[0] # then try", "*(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if", "the plugin's version information sent to discord :param ctx: the context the trigger", "**{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data(): \"\"\" Formats the top 5 scorers", "len(topic_ending):] if position != -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix =", "mainly discussed on the fragstealers_inc discord tech channel of the Bus Station server(s).", "'0', you can leave it unchecked. 
By default, this will be enabled and", "player sent (includes the trigger) :param channel: the channel the message came through,", "\\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled", "len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\")", "(includes the trigger) :param channel: the channel the message came through, i.e. team", "# this regular expression will make sure that the \"@user\" has at least", "a message was sent on a private chat to the bot :param ctx:", "in. :param version_information: the plugin's version_information string :param logger: the logger used for", "one are found \"\"\" # try a direct match for the whole name", "discord user to the plugin via private message * qlx_discordExecPrefix (default: \"qlx\") command", "respond to !version or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\")", "len(channel) == 1: return channel[0] # then try a case-insensitive direct match with", "or at the beginning of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list", "trigger channels, when configured. :param mapname: the new map :param factory: the map", "authentication to the bot :param ctx: the context the trigger happened in \"\"\"", "Handler called when the game is in countdown, i.e. about to start. 
This", "super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\",", "that was sent by the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self,", "message itself, ideally taken from message.clean_content to avoid ids of mentioned users and", "chat messages. * qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel ids for", "the discord.py bot in a new event_loop until completed. \"\"\" loop = asyncio.new_event_loop()", "the message was sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \" *(to", "\"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await self.reply_to_context(ctx, \"You have been successfully authenticated.", "description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now configured bot to discord", "need to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the", "message to the given channel :param player: the player that originally sent the", "and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if", "= mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update", "list of channels connected to the discord server :param player: (default: None) when", "their score \"\"\" if len(player_list) == 0: return \"\" 
players_by_score = sorted(player_list, key=lambda", "customized variation of discord.py's :class:`DefaultHelpCommand`. \"\"\" def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\"", "if not channel_ids or len(channel_ids) == 0: return # take the final 10", "len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if game.map_title else game.map gametype =", "\"\"\" # try a direct match for the whole name first member =", "of relay in this basic version of a discord plugin: * full relay", "been mainly discussed on the fragstealers_inc discord tech channel of the Bus Station", "of the channels the topic should be set upon. :param topic: the new", "updates are send to discord * triggered relay of specific messages between discord", "the SimpleAsyncDiscord thread is started. We will set up the bot here with", "\"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$,", "self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel,", "the topics of the relay channels and the triggered channels (when configured), and", "\".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e))", "player left \"\"\" if reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]:", "self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) 
message = self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name),", "for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match:", "qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages from discord via private message", "return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not", "sent in a configured relay or triggered channel :param ctx: the context the", "the bot :param ctx: the context the trigger happened in :param qlx_command: the", "None and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return list(member)[0] # we", "or failed. The method sends a corresponding message to the discord relay channels.", "\"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\" send a triggered", "etc. \"\"\" if len(msg) > 2 or (len(msg) == 2 and msg[1] not", "mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken (default: \"\") The token", "and messages on the Quake Live server to discord. :param player: the player", "a player disconnects. The method sends a corresponding message to the discord relay", "of the server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is", "a description. Uses: * qlx_discordBotToken (default: \"\") The token of the discord bot", "topic. 
position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position != -1", "is None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self,", "the trigger happened in :param message: the message to send to minqlx \"\"\"", "player: the player that originally sent the message :param channel: the channel the", "match :param match: the match to look for in the user name and", "ch.name == match] if len(channel) == 1: return channel[0] # then try a", "self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\")", "channel_list if ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return channel[0] # we", "message to the discord relay channels. :param votes: the final votes :param vote:", "Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\",", "gametype = game.type_short.upper() reply = \"{0} on **{1}** ({2}) with **{3}/{4}** players. 
{5}\".format(", "send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self, channel_ids, content): \"\"\"", "len(member) == 1: return list(member)[0] # we found more than one matching member,", "async def trigger_status(self, ctx): \"\"\" Triggers game status information sent towards the originating", "Send a message to a set of channel_ids on discord provided. :param channel_ids:", "discord users for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in member: alternatives", "the main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents)", "if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting", "Live chat where a prefix needs to be used for the messages to", "sends a corresponding message to the discord relay channels, and updates the relay", "self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\",", "console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for item in string_set: if", "the discord relay channels. 
:param caller: the player that initiated the vote :param", "author, discord_channel): self.client = client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property", "context the trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx):", "def trigger_status(self, ctx): \"\"\" Triggers game status information sent towards the originating channel", "\"\"\" Checks whether the message originate in a configured triggered channel :param ctx:", "communicate to discord, and provides certain commands in the relay and triggered channels", "2 and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg)", "the content of the message to send to the discord channels \"\"\" if", "to match portions of the name or portions of the nick, if set", "^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives = \"\" for alternative_member in", "For a description on how to set up a bot for you discord", "if match in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if", "= game.type_short.upper() reply = \"{0} on **{1}** ({2}) with **{3}/{4}** players. {5}\".format( ginfo,", "def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when a player disconnects. The", "player disconnects. 
The method sends a corresponding message to the discord relay channels,", "the bot :param ctx: the context the trigger happened in \"\"\" return ctx.message.author.id", "ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free to modify this plugin", "ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to", "channels, when configured. :param mapname: the new map :param factory: the map factory", "return member[0] # then try a direct match at the user's nickname member", "async def version(self, ctx): \"\"\" Triggers the plugin's version information sent to discord", "replaces a mentioned discord channel (indicated by #channel-hint with a real mention :param", "triggered relay channels' topics! return \"{0} on **{1}** ({2}) with **{3}/{4}** players. \".format(ginfo,", "*(to specs)*\"} if channel.name not in handled_channels: return if self.is_filtered_message(msg): return if channel.name", "def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is unloaded to make sure,", "information sent to discord :param ctx: the context the trigger happened in \"\"\"", ":param qlx_command: the command that was sent by the user \"\"\" @minqlx.next_frame def", "votes :param vote: the initial vote that passed or failed, i.e. map change,", "content) async def on_ready(self): \"\"\" Function called once the bot connected. 
Mainly displays", "discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author,", "topic: the topic to set on the given channels \"\"\" # if there", "pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display", "that matches the given match :param match: the match to look for in", "channel name match case-sensitive first channel = [ch for ch in channel_list if", "for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self,", "msg: the message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer):", "when a player connects. The method sends a corresponding message to the discord", "blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not in handled_channels: return if", "was sent. \"\"\" # guard clause to avoid None messages from processing. 
if", "{}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is unloaded to", "in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match, member_list, player) if member is", "to respond to from within minqlx for interactions with discord \"\"\" def __init__(self,", "def get_channel_topic(self, channel_id): \"\"\" get the topic of the provided channel id :param", "minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat messages on the", "reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self,", "= [ch for ch in self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches", "message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message", "def qlx(self, ctx, *qlx_command: str): \"\"\" Handles exec messages from discord via private", "in \"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\"", "the Quake Live console. :param channel: the channel, the message came from. :param", "exception, ctx): \"\"\" overrides the default command error handler so that no exception", "attempts for the user's discord id to authenticate. 
if ctx.message.author.id not in self.auth_attempts:", "= \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content = \"**{}**: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), message) self.send_to_discord_channels(self.discord_triggered_channel_ids,", "self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx plugin's", "the message to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def send_to_discord_channels(self,", "the discord triggered relay channels. :param player: the player that send to the", "relay and all the triggered channels :param topic: the topic to set on", "the player about this. if len(member) > 1 and player is not None:", "\"\"\" relay a message to the given channel :param player: the player that", "player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks whether the given message should", "intact on the configured channels :param channel_ids: the set of channels to update", "\"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\",", "Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") 
Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") #", "command. Forwards any messages after !discord to the discord triggered relay channels. :param", "values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks", "game = self.game if game is None: return topic = mydiscordbot.game_status_information(game) top5_players =", "is None: return asyncio.run_coroutine_threadsafe(self.discord.change_presence(status=\"offline\"), loop=self.discord.loop) asyncio.run_coroutine_threadsafe(self.discord.logout(), loop=self.discord.loop) def relay_message(self, msg): \"\"\" relay a", "bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\",", "return channel.topic def stop(self): \"\"\" stops the discord client \"\"\" if self.discord is", "channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop)", "ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if", "discord relay channels. :param caller: the player that initiated the vote :param vote:", ":return: the topic that represents the current game state. 
\"\"\" ginfo = mydiscordbot.get_game_info(game)", "player=None): \"\"\" find a user that matches the given match :param match: the", "== \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return", "in the channel name :param channel_list: the list of channels connected to the", "cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the discord bot to", "a discord bot with commands and listeners on this pseudo cog class :param", "1 and player is not None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member),", "return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0 or", "the fragstealers_inc discord tech channel of the Bus Station server(s). You need to", "send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy", "self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its interactions on", "the channel name channel = [ch for ch in channel_list if ch.name.lower() ==", "when the game is in countdown, i.e. about to start. 
This function mainly", "\"1\") replace mentions (@user and #channel) for messages sent towards relay channels *", "# User has reached maximum auth attempts, we will bar her/him from authentication", "\"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\") await", "from the topic, and search for it in the current topic topic_ending =", "the discord channels \"\"\" if not self.is_discord_logged_in(): return # if we were not", "\"quakelive\") Message prefix for the trigger on triggered relay channels. * qlx_discordTriggerStatus (default:", "channel.name not in handled_channels: return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]:", "connected \"\"\" content = \"_{} connected._\".format(mydiscordbot.escape_text_for_discord(player.clean_name)) self.discord.relay_message(content) @staticmethod def escape_text_for_discord(text): \"\"\" Escapes the", "= Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\", "discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the plugin's version information\")) def reply_to_context(self, ctx, message):", "in :param message: the message to send to minqlx \"\"\" prefix_length = len(\"{}{}", "discord happen. :param message: the message that was sent. \"\"\" # guard clause", "produced for command errors Might be changed in the future to log those", "sent to. 
:param content: the content of the message to send to the", "the channel, author, and content of a message so that it will be", "0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates", "to set on all the channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic:", "triggered channel :param ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id", "return returned_message @staticmethod def find_user_that_matches(match, member_list, player=None): \"\"\" find a user that matches", "the message himself, do nothing. if message.author == self.discord.user: return # relay all", "topic from :return: the topic of the channel \"\"\" channel = self.discord.get_channel(channel_id) if", "def __init__(self, discord_client=None): super().__init__() # maybe initialize plugin cvars Plugin.set_cvar_once(\"qlx_discordBotToken\", \"\") Plugin.set_cvar_once(\"qlx_discordRelayChannelIds\", \"\")", "\"\"\" sender = author.name if author.nick is not None: sender = author.nick if", "discord_channel): super().__init__(\"discord\") self.client = client self.author = author self.discord_channel = discord_channel def __repr__(self):", "to modify this plugin to your own one, except for the version command", "self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") #", "= self.game if game is None: return topic = mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data()", "description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = 
Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord)", "\"\"\" if passed: content = \"*Vote passed ({} - {}).*\".format(*votes) else: content =", "def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author, ctx.message.channel), \" \".join(qlx_command), DiscordChannel(self, ctx.message.author, ctx.message.channel)) except", "# File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\",", "for it in the current topic topic_ending = topic[-10:] for channel_id in channel_ids:", "Uses: * qlx_discordBotToken (default: \"\") The token of the discord bot to use", "ids of mentioned users and channels on the discord server. :return: the formatted", "minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx): \"\"\" overrides the default", "with a real mention :param message: the message to replace the user mentions", "returned_message @staticmethod def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that matches the", "the player sent (includes the trigger) :param channel: the channel the message came", "discord.py in your python installation, i.e. python3 -m pip install -U discord.py \"\"\"", "have been mainly discussed on the fragstealers_inc discord tech channel of the Bus", "__init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client the discord bot runs", "\"{0} on **{1}** ({2}) with **{3}/{4}** players. 
{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players,", "for triggered messages sent towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort", "player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel", "len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception, ctx): \"\"\"", "(indicated by @user-hint with a real mention :param message: the message to replace", "arguments of the vote, i.e. map name, which player to kick, etc. :param", "commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3 attempts for the user's", "provided channel id :param channel_id: the id of the channel to get the", "for :param limit: (default: None) just list the top players up to the", "@staticmethod def team_data(player_list, limit=None): \"\"\" generates a sorted output of the team's player", "Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called", "connection to discord is properly closed when this plugin is unloaded. 
:param plugin:", "\"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory):", ":param passed: boolean indicating whether the vote passed \"\"\" if passed: content =", "top5 scorers with the scores and connection time to the server \"\"\" player_data", "user to the plugin via private message * qlx_discordExecPrefix (default: \"qlx\") command for", "return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord channel (indicated", "{}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply function to relay messages", "super().__init__(\"discord\") self.client = client self.author = author self.discord_channel = discord_channel def __repr__(self): return", "mentions in :param player: (default: None) when several alternatives are found for the", "originally sent the message :param message: the content of the message \"\"\" if", "= \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called when", "game. :return: string of the current top5 scorers with the scores and connection", "channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list", "\"\"\" player_data = \"\" teams = Plugin.teams() if len(teams['red']) > 0: player_data +=", "sends a message to all relay channels. 
\"\"\" game = self.game if game", "return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self):", "pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx", "trigger) :param channel: the channel the message came through, i.e. team chat, general", "channel, let's tell the player about this. if len(channel) > 1 and player", "or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel", "0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return", "for you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5", "Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions", "author, discord_channel): super().__init__(\"discord\") self.client = client self.author = author self.discord_channel = discord_channel def", "\"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author", "on the discord server. :return: the formatted message that may be sent back", "# if the bot sent the message himself, do nothing. if message.author ==", "return # User has reached maximum auth attempts, we will bar her/him from", "used, this player is told what the alternatives are. 
None is returned in", "Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values from the server", ":param msg: the msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class", "configured, do nothing. if not channel_ids or len(channel_ids) == 0: return # take", "Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\",", "]{3,})\") member_list = [user for user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match", "\"1\") indicates whether the bot will respond to !version or responses are completely", "the message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player)", "of channel_ids for kept topic suffixes and the related suffixes. Make sure to", "# connect the now configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def", "if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids", "Relays a message from the triggered channels to minqlx :param ctx: the context", "red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name", "discord private messages to the discord bot. 
* qlx_discordAuthCommand (default: \"auth\") command for", "\"The server\" content = \"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content)", "Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set(", "Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat =", ":param channel_id: the id of the channel to get the topic from :return:", "{0}{1} command for more info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error):", "the relay channels back to Quake Live. 
if message.channel.id in self.discord_relay_channel_ids: content =", "three characters, and is either # prefixed by a space or at the", "name match case-sensitive first channel = [ch for ch in channel_list if ch.name", "reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks", "channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic", "when the message did not include anything to forward, show the usage help", "file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize = minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt", "on discord provided. :param channel_ids: the ids of the channels the message should", "ctx: the context of the original message sent for authentication :param password: the", "in that case. :return: the matching member, or None if none or more", "replace discord user mentions. If you don't need that, i.e. you did configured", "self.author = author self.discord_channel = discord_channel def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def", "a broadcast channel, and specific messages from another channel. For a description on", "def update_topics_on_relay_and_triggered_channels(self, topic): \"\"\" Helper function to update the topics on all the", "of the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message,", "class to relay messages to discord \"\"\" def __init__(self, client, author, discord_channel): self.client", "edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of", "author: the author of the original message. 
:param content: the message itself, ideally", "= Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\")", "a message to the configured relay_channels :param msg: the message to send to", "the topic to set on the given channels \"\"\" # if there are", "discord connection set up.\" if self.is_discord_logged_in(): return \"Discord connection up and running.\" return", "match.lower()] if len(channel) == 1: return channel[0] # then we try a match", "a mentioned discord channel (indicated by #channel-hint with a real mention :param message:", "run(self): \"\"\" Called when the SimpleAsyncDiscord thread is started. We will set up", "relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel) for triggered", "it in the current topic topic_ending = topic[-10:] for channel_id in channel_ids: previous_topic", ":param ctx: the context the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def", "with discord's formattings.) :param text: the text that shall be escaped for discord", "you don't need that, i.e. you did configured and of the qlx_discordReplaceMentions cvars", "\"\"\" pass def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic =", "author, content): \"\"\" Format the channel, author, and content of a message so", "channel: the channel the original message came through :param message: the content of", "final votes :param vote: the initial vote that passed or failed, i.e. 
map", "< 0 or game.blue_score < 0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score)", "return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player:", "attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth attempts, we will", "except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception() f()", "player) if member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod", "chat messages on the server. This function will forward and messages on the", "name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if", "= Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids =", "= \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces", "\"\"\" if game.state == \"warmup\": return \"Warmup\" if game.state == \"countdown\": return \"Match", "more info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel):", "= Plugin.get_cvar(\"qlx_discordExecPrefix\") 
extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger =", "channel_ids, do nothing. if not channel_ids or len(channel_ids) == 0: return # set", "discord channel (indicated by #channel-hint with a real mention :param message: the message", "found more than one matching member, let's tell the player about this. if", "to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def connect_discord(self):", "\"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached", "not be sent to discord. :param msg: the message to check whether it", "channel_ids: the ids of the channels the message should be sent to. :param", "ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\"", "is told what the alternatives are. None is returned in that case. :return:", "= sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data =", "case it gets disconnected. :param player: the player that send to the trigger", "within minqlx for interactions with discord \"\"\" def __init__(self, client, author, discord_channel): super().__init__(\"discord\")", "user mentions. If you don't need that, i.e. you did configured and of", "\"!\") Command prefix for all commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message", "\"\"\" Generate the text for the topic set on discord channels. 
:param game:", "= minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic) threading.Timer(self.discord_topic_update_interval, self._topic_updater).start() def update_topics_on_relay_and_triggered_channels(self,", "Comma separated list of channel ids for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\")", "- self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on the channels that are", "channels, when configured. :param player: the player that connected :param reason: the reason", "init the main discord interactions if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop,", "| self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly set the topic on channels", "content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\"", "full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel ids for", "\"\"\" if not self.is_discord_logged_in(): return message returned_message = message # this regular expression", "escaped for discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*',", "used for logging, usually passed through from the minqlx plugin. 
\"\"\" super().__init__() self.version_information", "or at the beginning of the string matcher = re.compile(\"(?:^| )#([^ ]{3,})\") channel_list", "\" \"You will be barred from authentication for {} seconds.\" .format(bar_delay)) def f():", "reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None: returned_message =", "if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list,", "did not include anything to forward, show the usage help text. if len(msg)", "Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting", "the topic on the given channels and keeps the topic suffix intact on", "\"#channel\" has at least three characters, and is either # prefixed by a", "alternatives are. No replacements for the ambiguous substitutions will happen. :return: the original", "several alternatives are found for the mentions used, this player is told what", "= {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\",", "was either sent in a configured relay or triggered channel :param ctx: the", "none or more than one are found \"\"\" # try a direct channel", "\"\"\" Checks whether an author is currently barred from authentication to the bot", "the bot send the current status of the game server. * qlx_discordMessagePrefix (default:", "channel topic as well as the trigger channels, when configured. :param mapname: the", "listeners on this pseudo cog class :param discord_bot: the discord_bot to initialize \"\"\"", "for messages that should not be sent from quake live to discord *", "!discord command. 
Forwards any messages after !discord to the discord triggered relay channels.", "await self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User", "\"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a", ":param player: (default: None) when several alternatives are found for the mentions used,", "are found \"\"\" # try a direct match for the whole name first", "find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that matches the given match :param", "self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool)", "for a description. 
Uses: * qlx_discordBotToken (default: \"\") The token of the discord", "self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the", "private message :param ctx: the context of the original message sent for authentication", "name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat, name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True,", "in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered channels", "ambiguous substitutions will happen. :return: the original message replaced by properly formatted user", "player: minqlx.Player, msg, channel): \"\"\" Handler of the !discord command. Forwards any messages", "= [ch for ch in channel_list if ch.name == match] if len(channel) ==", "num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx):", "indicates whether the bot will respond to !version or responses are completely switched", "player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def team_data(player_list, limit=None): \"\"\" generates a", "Live. 
if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply(", "len(channel_ids) == 0: return # send the message in its own thread to", "is not None and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return list(member)[0]", "!= -1)] if len(member) == 1: return list(member)[0] # we found more than", "Amount of seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary", "\"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title", "player is told what the alternatives are. No replacements for the ambiguous substitutions", "self.discord.get_all_channels() if ch.type in [ChannelType.text, ChannelType.voice, ChannelType.group]] matches = matcher.findall(returned_message) for match in", "the discord bot runs in. :param version_information: the plugin's version_information string :param logger:", "game status information sent towards the originating channel :param ctx: the context the", "left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth attempts, we will bar", "to discord \"\"\" def __init__(self, client, author, discord_channel): self.client = client self.author =", "match)) alternatives = \"\" for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives)", "a minqlx dummy player class to relay messages to discord \"\"\" def __init__(self,", "full relay to a broadcast channel, and specific messages from another channel. 
For", "name channel = [ch for ch in channel_list if ch.name.lower() == match.lower()] if", "Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\") Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\",", "\"spectator_chat\": \" *(to specs)*\"} if channel.name not in handled_channels: return if self.is_filtered_message(msg): return", "self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic", "connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel ids", "when a plugin is unloaded to make sure, that the connection to discord", "channels that are configured accordingly self.update_topic_on_channels_and_keep_channel_suffix( topic_channel_ids & self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids,", "return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state == \"in_progress\": return \"Match", "the message to replace the user mentions in :param player: (default: None) when", "match portions of the name or portions of the nick, if set member", "minqlx.Player, msg, channel): \"\"\" Handler of the !discord command. 
Forwards any messages after", "= Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST)", "either # prefixed by a space or at the beginning of the string", ":param ctx: the context the trigger happened in \"\"\" return ctx.message.author.id in self.auth_attempts", ":param msg: the original message the player sent (includes the trigger) :param channel:", "are not triggered relay channels configured, do nothing. if not channel_ids or len(channel_ids)", "passed): \"\"\" Handler called when a vote was passed or failed. The method", "checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel],", "alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player,", "to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel ids for", "used in status messages and setting of channel topics. :param game: the game", "def reply(self, msg): \"\"\" overwrites the channel.reply function to relay messages to discord", "are free to modify this plugin to your own one, except for the", "what the alternatives are. None is returned in that case. :return: the matching", "the discord server. 
:return: the formatted message that may be sent back to", "cog class :param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda", "the message to check whether it should be filtered :return whether the message", "content: the message itself, ideally taken from message.clean_content to avoid ids of mentioned", "in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None: previous_topic = topic #", "user_match: len(user_match), reverse=True): if match in [\"all\", \"everyone\", \"here\"]: continue member = SimpleAsyncDiscord.find_user_that_matches(match,", "discord relay channels. :param votes: the final votes :param vote: the initial vote", "and server logfile, and sets the bot to playing Quake Live on discord.", "bot will respond to !help or responses are completely switched off * qlx_discordEnableVersion", "ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game status", "if direct searches for the match fail, we try to match portions of", "completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of the", "its current game. :return: string of the current top5 scorers with the scores", "the player left \"\"\" if reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was", "\"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\") Plugin.set_cvar_once(\"qlx_discordKeptTopicSuffixes\", \"{}\") Plugin.set_cvar_once(\"qlx_discordCommandPrefix\", \"!\") Plugin.set_cvar_once(\"qlx_discordTriggerTriggeredChannelChat\", \"quakelive\") Plugin.set_cvar_once(\"qlx_discordTriggerStatus\", \"status\") Plugin.set_cvar_once(\"qlx_discordMessagePrefix\", \"[DISCORD]\")", "# of the triggered relay channels' topics! 
return \"{0} on **{1}** ({2}) with", "bot sent the message himself, do nothing. if message.author == self.discord.user: return #", "this. if len(channel) > 1 and player is not None: player.tell(\"Found ^6{}^7 matching", "when a map is changed. The method sends a corresponding message to the", "self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to relay messages", "self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\", self.cmd_discordbot, permission=1, usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and", "console and server logfile, and sets the bot to playing Quake Live on", "\"{}.\".format(reason) else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content)", "the suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands from discord", "the bot will respond to !version or responses are completely switched off *", "system, and some basic Quake Live status updates are send to discord *", "self.discord is None: return \"No discord connection set up.\" if self.is_discord_logged_in(): return \"Discord", "vote was passed or failed. 
The method sends a corresponding message to the", "return value may be used for status messages and used in topics to", "* qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to execute server commands from", "if matcher.match(msg): return True return False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel):", "in the game console and server logfile, and sets the bot to playing", "\"^\\!s$, ^\\!p$\") comma separated list of regular expressions for messages that should not", "and Quake Live chat where a prefix needs to be used for the", "separated list of channel ids for relaying team chat messages. * qlx_discordTriggeredChannelIds (default:", ":return: a discord ready text representation of the player's of that team by", "on the next line, you may need to change the topic_ending logic in", "for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of", "checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with", "def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is currently barred from authentication", "sender = author.nick if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2", "Handler called when a player disconnects. 
The method sends a corresponding message to", "{}).*\".format(*votes) else: content = \"*Vote failed.*\" self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\"", "\"Type {0}{1} command for more info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self,", "string :param logger: the logger used for logging, usually passed through from the", "= message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self,", "configured channels :param channel_ids: the set of channels to update the topic on", "sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel)", "bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path", "with this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the bot will respond", "self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the topic on the triggered channels self.set_topic_on_discord_channels({channel_id},", "make sure that the \"#channel\" has at least three characters, and is either", "minqlx.owner() @property def channel(self): return DiscordChannel(self.client, self.author, self.discord_channel) def tell(self, msg): \"\"\" overwrites", "sure to use single quotes for the suffixes. 
* qlx_discordCommandPrefix (default: \"!\") Command", "to discord :param msg: the msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id},", "the message itself, ideally taken from message.clean_content to avoid ids of mentioned users", "if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids # directly", "properly formatted user mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message", "%(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console", "is unloaded to make sure, that the connection to discord is properly closed", "match with the channel name channel = [ch for ch in channel_list if", "Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled: self.setup_extended_logger() def setup_extended_logger(self):", "console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for item in string_set:", "for the topic set on discord channels. :param game: the game to derive", "\"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to communicate", "etc. 
:param passed: boolean indicating whether the vote passed \"\"\" if passed: content", "bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names =", "channel :param ctx: the context the trigger happened in \"\"\" return ctx.message.channel.id in", "qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated list of regular expressions for messages that", "class :param discord_bot: the discord_bot to initialize \"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx:", "factory): \"\"\" Handler called when a map is changed. The method sends a", "to send to the discord channels \"\"\" if not self.is_discord_logged_in(): return # if", "in order to be able to replace discord user mentions. If you don't", "to the discord relay channels, and updates the relay channel topic as well", "sent the message :param msg: the message that was sent :param channel: the", "function will forward and messages on the Quake Live server to discord. :param", "either sent in a configured relay or triggered channel :param ctx: the context", "will be displayed nicely in the Quake Live console. :param channel: the channel,", "member_list: the list of members connected to the discord server :param player: (default:", "replace '*' (asterisks) with a variant to not interfere with discord's formattings.) :param", ":param password: the password to authenticate \"\"\" if password == self.discord_admin_password: self.authed_discord_ids.add(ctx.message.author.id) await", "to avoid None messages from processing. if not message: return # if the", "relay all messages from the relay channels back to Quake Live. 
if message.channel.id", "to discord \"\"\" for message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return", "^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async", "def relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat message, that might be", "switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of the discord", "triggered message from QL with this text portion. Useful when running multiple servers", ":param ctx: the context the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def", "a discord plugin: * full relay between Quake Live chat and discord, where", "= logger self.discord = None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token =", "logging import os from logging.handlers import RotatingFileHandler import minqlx from minqlx import Plugin", "SimpleAsyncDiscord thread is started. We will set up the bot here with the", "= len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if game.map_title else game.map gametype", "the channel.reply function to relay messages to discord :param msg: the message to", "\"\"\" generates a sorted output of the team's player by their score :param", "= logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs)", "[message...] 
to the Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True,", "suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on the channels", "himself, do nothing. if message.author == self.discord.user: return # relay all messages from", ":param caller: the player that initiated the vote :param vote: the vote itself,", "about this. if len(member) > 1 and player is not None: player.tell(\"Found ^6{}^7", "we will bar her/him from authentication for 5 minutes (300 seconds) bar_delay =", "bot :param ctx: the context the trigger happened in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel)", "to log those problems to the minqlx.logger \"\"\" pass def _topic_updater(self): try: game", "real mention :param message: the message to replace the user mentions in :param", "the content of the message \"\"\" if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message", "Prefix any triggered message from QL with this text portion. Useful when running", "to the discord relay channels. 
:param caller: the player that initiated the vote", "authentication for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def", "original message came through :param message: the content of the message \"\"\" if", "where a prefix needs to be used for the messages to be forwarded.", "set)) self.discord_kept_topic_suffixes = eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled =", "matching member, or None if none or more than one are found \"\"\"", "\"\"\" Function called once a message is send through discord. Here the main", "\"[DISCORD]\") messages from discord to quake live will be prefixed with this prefix", "prefixed with this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the bot will", "initialize the discord bot and its interactions on the discord server if discord_client", "+= \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks whether the", "message to the discord relay channels. 
and updates the relay channel topic as", "in \"\"\" return ctx.message.author.id in self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an", "player is not None: player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives", "bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions =", "when several alternatives are found for the mentions used, this player is told", "(@user and #channel) for messages sent towards relay channels * qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\")", "\"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned", "i.e. python3 -m pip install -U discord.py \"\"\" import re import asyncio import", "or len(channel_ids) == 0: return # send the message in its own thread", "alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\" send", "QL with this text portion. Useful when running multiple servers on the same", "changed. The method sends a corresponding message to the discord relay channels. and", "ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently no game", "\"\"\" Helper function to update the topics on all the relay and all", "vote, args, passed): \"\"\" Handler called when a vote was passed or failed.", "properly closed when this plugin is unloaded. :param plugin: the plugin that was", "discord channels. 
:param game: the game to derive the status information from :return:", "an author is currently barred from authentication to the bot :param ctx: the", "if self.auth_attempts[ctx.message.author.id] > 0: await self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\"", "came from. :param author: the author of the original message. :param content: the", "= game.type_short.upper() # CAUTION: if you change anything on the next line, you", ":param player: the player that send to the trigger :param msg: the original", "Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions", "plugin: the plugin that was unloaded. \"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod", "commands on the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status", "via private message to the bot :param ctx: the context the trigger happened", "provided any channel_ids, do nothing. if not channel_ids or len(channel_ids) == 0: return", "== 0: return # take the final 10 characters from the topic, and", "@ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler)", "bot here with the right commands, and run the discord.py bot in a", "matching channel, or None if none or more than one are found \"\"\"", "should be set. 
\"\"\" # if we were not provided any channel_ids, do", "to discord * triggered relay of specific messages between discord and Quake Live", "Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values from", "for the user's discord id to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id]", "def relay_message(self, msg): \"\"\" relay a message to the configured relay_channels :param msg:", "handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler", "originally sent the message :param channel: the channel the original message came through", "matching member, let's tell the player about this. if len(member) > 1 and", "provides certain commands in the relay and triggered channels as well as private", "try a match with portions of the channel name channel = [ch for", "chnannel the message was sent to \"\"\" handled_channels = {\"chat\": \"\", \"red_team_chat\": \"", "{2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2 {3}\".format(self.discord_message_prefix, channel, sender, content) async def", "specs)*\"} if channel.name not in handled_channels: return if self.is_filtered_message(msg): return if channel.name in", "self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on a set", "== 1: return member[0] # if direct searches for the match fail, we", "text that shall be escaped for discord chat channels \"\"\" escaped_text = text.replace('_',", "if not self.discord_show_relay_channel_names and channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 
{2}\".format(self.discord_message_prefix, sender, content)", "interactions on the discord server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger)", "the relay channel topic as well as the trigger channels, when configured. :param", "server\" content = \"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def", "channel ids where the topic suffix will be kept upon updating. * qlx_discordUpdateTopicInterval", "set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\",", "0: return \"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score", "password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum", "relay channels back to Quake Live. if message.channel.id in self.discord_relay_channel_ids: content = message.clean_content", "more than one matching channel, let's tell the player about this. if len(channel)", "auth(self, ctx, password: str): \"\"\" Handles the authentication to the bot via private", "caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\" content = \"_{} called a", ":param limit: (default: None) just list the top players up to the given", "class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx plugin's bot to provide", "related code. 
The basic ideas for this plugin came from Gelenkbusfahrer and roast", "passed: content = \"*Vote passed ({} - {}).*\".format(*votes) else: content = \"*Vote failed.*\"", "the context the trigger happened in :param qlx_command: the command that was sent", "command error handler so that no exception is produced for command errors Might", "topic) # keep the topic suffix on the channels that are configured accordingly", "a map is changed. The method sends a corresponding message to the discord", "allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message): \"\"\" relay a message", "\"You can now use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow", "bot :param ctx: the context the trigger happened in :param qlx_command: the command", "\"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and msg[1] == \"connect\":", "topic: the topic to set on all the channels \"\"\" if not self.is_discord_logged_in():", "\"\" for player in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def", "send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to respond to", "configured discord channels. There are two basic types of relay in this basic", "channel ids for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of", "*(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"} if channel.name not in handled_channels: return", "a corresponding message to the discord relay channels. 
:param caller: the player that", "Checks whether a message was sent on a private chat to the bot", "num_players = len(Plugin.players()) max_players = game.maxclients maptitle = game.map_title if game.map_title else game.map", "\"0\") enables extended logging for the discord library (logs to minqlx_discord.log in the", "plugin came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on", "text representation of the player's of that team by their score \"\"\" if", "= int(item) int_set.add(value) return int_set def status(self): if self.discord is None: return \"No", "if len(member) == 1: return member[0] # if direct searches for the match", "portions of the channel name channel = [ch for ch in channel_list if", "her/him from authentication for 5 minutes (300 seconds) bar_delay = 300 await self.reply_to_context(ctx,", "qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for kept topic suffixes and the", "sent to discord :param ctx: the context the trigger happened in \"\"\" await", "of the triggered relay channels' topics! 
return \"{0} on **{1}** ({2}) with **{3}/{4}**", "MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx plugin's bot to provide help", "Command, DefaultHelpCommand import discord.ext.tasks plugin_version = \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\"", "commands from discord * qlx_discordTriggerTriggeredChannelChat (default: \"quakelive\") Message prefix for the trigger on", "ctx, password: str): \"\"\" Handles the authentication to the bot via private message", "<https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the fragstealers_inc discord tech channel of", "replaces a mentioned discord user (indicated by @user-hint with a real mention :param", "version 1.5 of the mydiscordbot, you also need to enable the Server Members", "def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter", "message was sent on a private chat to the bot :param ctx: the", "self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat,", "the \"#channel\" has at least three characters, and is either # prefixed by", "the discord server :param player: (default: None) when several alternatives are found for", "version_information string :param logger: the logger used for logging, usually passed through from", "votes: the final votes :param vote: the initial vote that passed or failed,", "if len(msg) > 2 or (len(msg) == 2 and msg[1] not in [\"status\",", "from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\",", "properly formatted channel mentions \"\"\" if not 
self.is_discord_logged_in(): return message returned_message = message", "command errors Might be changed in the future to log those problems to", "\"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else:", "None is returned in that case. :return: the matching member, or None if", "of channels to update the topic on :param topic: the topic to set", "the discord relay channels, and updates the relay channel topic as well as", "version command related code. The basic ideas for this plugin came from Gelenkbusfahrer", "expressions for messages that should not be sent from quake live to discord", "channels as well as private authentication to the bot to admin the server.", "Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix", "the configured relay_channels :param msg: the message to send to the relay channel", "\"\"\" replaces a mentioned discord channel (indicated by #channel-hint with a real mention", "self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format the channel, author,", "are completely switched off * qlx_discordEnableVersion (default: \"1\") indicates whether the bot will", "the bot\")) discord_bot.add_command(Command(self.qlx, name=self.discord_exec_prefix, checks=[self.is_private_message, self.is_authed], hidden=True, pass_context=True, help=\"execute minqlx commands on the", "formatter for the minqlx plugin's bot to provide help information. 
This is a", "by properly formatted user mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message =", "off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the channel name of the discord channel", "priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\", self.handle_player_connect, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_disconnect\", self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended)", "\"\"\" Provides the ending_note for the help output. \"\"\" command_name = self.context.invoked_with return", "to the trigger :param msg: the message the player sent (includes the trigger)", "and self.discord.is_ready() def update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on the given", "in players_by_score: team_data += \"**{}**({}) \".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\"", "relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel ids for relaying", "with **{3}/{4}** players. 
{5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply", "limit: (default: None) just list the top players up to the given limit", "def setup_extended_logger(self): discordLogger = logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs", "self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called when a vote", "in \"\"\" return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a user", "= [user for user in member_list if user.name.lower() == match.lower()] if len(member) ==", "you discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of", "unloaded. :param plugin: the plugin that was unloaded. \"\"\" if plugin == self.__class__.__name__:", "the server and its current game. :return: string of the current top5 scorers", "\"\"\" Updates the topic on the given channels and keeps the topic suffix", "related suffixes. Make sure to use single quotes for the suffixes. * qlx_discordCommandPrefix", "Discord...\") self.disconnect_discord() return if len(msg) == 2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to", "= \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\" Checks whether", "than one matching channel, let's tell the player about this. if len(channel) >", "a message is send through discord. Here the main interaction points either back", "the same discord connected to. * qlx_discordUpdateTopicOnTriggeredChannels (default: \"1\") Boolean flag to indicate", "player, etc. :param args: any arguments of the vote, i.e. 
map name, which", "given limit :return: a discord ready text representation of the player's of that", "take the final 10 characters from the topic, and search for it in", "connects. The method sends a corresponding message to the discord relay channels, and", "the player that originally sent the message :param channel: the channel the original", "plugin's main purpose is to create a relay chat between the Quake Live", "to keep the right portion # of the triggered relay channels' topics! return", "general chat, etc. \"\"\" # when the message did not include anything to", "to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for", "the current top5 scorers with the scores and connection time to the server", "to the server \"\"\" player_data = \"\" teams = Plugin.teams() if len(teams['red']) >", "of a message so that it will be displayed nicely in the Quake", "self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information),", "\"Warmup\" if game.state == \"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score]", "game.state == \"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score", "update the topics on all the relay and all the triggered channels :param", "message to the discord relay channels, and updates the relay channel topic as", "caller: the player that initiated the vote :param vote: the vote itself, i.e.", "\"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command for more info on a", "user in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: 
len(user_match),", "attempts, we will bar her/him from authentication for 5 minutes (300 seconds) bar_delay", "the discord server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord", "game.map_title else game.map gametype = game.type_short.upper() # CAUTION: if you change anything on", "is told what the alternatives are. No replacements for the ambiguous substitutions will", "for the trigger on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\") Trigger for", "!version or responses are completely switched off * qlx_displayChannelForDiscordRelayChannels (default: \"1\") display the", "not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True, pass_context=True, help=\"auth with the bot\")) discord_bot.add_command(Command(self.qlx,", "send through discord. Here the main interaction points either back to Quake Live", "[\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return minqlx.RET_USAGE if len(msg) == 2 and msg[1] ==", "players. {5}\".format( ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players, mydiscordbot.player_data()) except minqlx.NonexistentGameError: reply = \"Currently", "the topic suffix will be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount", "triggered_chat(self, ctx, *message: str): \"\"\" Relays a message from the triggered channels to", "no topic suffix self.set_topic_on_discord_channels(topic_channel_ids - self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on", "relay channels. Your bot needs edit_channel permission for these channels. * qlx_discordKeepTopicSuffixChannelIds (default:", "may be sent back to Quake Live. 
\"\"\" sender = author.name if author.nick", "\"\" players_by_score = sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit]", "minqlx from minqlx import Plugin import discord from discord import ChannelType, AllowedMentions from", "\"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply function to relay", "server via discord private messages to the discord bot. * qlx_discordAuthCommand (default: \"auth\")", "= \"v1.51\" MAP_SUBSCRIBER_KEY = \"minqlx:maps:{}:subscribers\" class mydiscordbot(minqlx.Plugin): \"\"\" The plugin's main purpose is", "= client self.author = author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return", "members_intent = self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False,", "+ len(topic_ending):] if position != -1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix", "def tell(self, msg): \"\"\" overwrites the player.tell function to relay messages to discord", "if game.map_title else game.map gametype = game.type_short.upper() # CAUTION: if you change anything", "in channel_list if ch.name.lower() == match.lower()] if len(channel) == 1: return channel[0] #", "mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler of the", "DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to relay messages to discord \"\"\"", "the relay and triggered channels as well as private authentication to the bot", "the channel to get the topic from :return: the topic of the channel", "qlx_discordUpdateTopicInterval (default: 305) Amount of 
seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default:", "= game.maxclients maptitle = game.map_title if game.map_title else game.map gametype = game.type_short.upper() reply", "{}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned", "represents the current game state. \"\"\" ginfo = mydiscordbot.get_game_info(game) num_players = len(Plugin.players()) max_players", "the mentions used, this player is told what the alternatives are. None is", "the list of channels connected to the discord server :param player: (default: None)", "= mydiscordbot.game_status_information(game) top5_players = mydiscordbot.player_data() self.discord.relay_message(\"{}{}\".format(topic, top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel):", "from within minqlx for interactions with discord \"\"\" def __init__(self, client, author, discord_channel):", "= self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix != \"\":", "self.discord.relay_message(content) @minqlx.delay(1) def handle_game_countdown_or_end(self, *args, **kwargs): \"\"\" Handler called when the game is", "message): \"\"\" send a triggered message to the configured triggered_channel :param player: the", "for the bot in order to be able to replace discord user mentions.", "\"1\") indicates whether the bot will respond to !help or responses are completely", "file_handler.setFormatter(file_fmt) discordLogger.addHandler(file_handler) # Console console_fmt = logging.Formatter(\"[%(name)s.%(funcName)s] %(levelname)s: %(message)s\", \"%H:%M:%S\") console_handler = logging.StreamHandler()", "game status information\")) discord_bot.add_command(Command(self.triggered_chat, 
name=self.discord_trigger_triggered_channel_chat, checks=[self.is_message_in_triggered_channel], pass_context=True, help=\"send [message...] to the Quake Live", "nick, if set member = [user for user in member_list if user.name.lower().find(match.lower()) !=", "* qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel ids for triggered relay.", "mentioned users and channels on the discord server. :return: the formatted message that", "channel name channel = [ch for ch in channel_list if ch.name.lower().find(match.lower()) != -1]", "= text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player:", "that originally sent the message :param message: the content of the message \"\"\"", "\"\"\" initializes a discord bot with commands and listeners on this pseudo cog", "message: return # if the bot sent the message himself, do nothing. if", "player: the player that connected :param reason: the reason why the player left", "\"1\") replace mentions (@user and #channel) for triggered messages sent towards the triggered", "None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel, message):", "client which is used to communicate to discord, and provides certain commands in", "more data about the server and its current game. :return: string of the", "\"\"\" escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def", "args): \"\"\" Handler called when a vote was started. 
The method sends a", "self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\",", "the \"@user\" has at least three characters, and is either # prefixed by", "of the discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\")", "current status of the game server. * qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord", "is in countdown, i.e. about to start. This function mainly updates the topics", "the context the trigger happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx):", "logger): \"\"\" Constructor for the SimpleAsyncDiscord client the discord bot runs in. :param", "channel_ids or len(channel_ids) == 0: return # take the final 10 characters from", "Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool) if extended_logging_enabled:", "bot. * qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord user to the", "this player is told what the alternatives are. No replacements for the ambiguous", "0: return # set the topic in its own thread to avoid blocking", "handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called when a vote was passed", "def get_game_info(game): \"\"\" Helper to format the current game.state that may be used", "the game state \"\"\" if game.state == \"warmup\": return \"Warmup\" if game.state ==", "player to kick, etc. 
\"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller else \"The server\"", "Quake Live chat and discord, where every text message that is happening is", "== 2 and msg[1] == \"connect\": self.logger.info(\"Connecting to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord()", "be sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default: \"1\") replace mentions", "\"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args):", "integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False,", "message to all relay channels. \"\"\" game = self.game if game is None:", "<filename>src/main/python/mydiscordbot.py \"\"\" This is a plugin created by ShiN0 Copyright (c) 2017 ShiN0", "relay channels configured, do nothing. if not channel_ids or len(channel_ids) == 0: return", "qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel ids where the topic suffix", "list of members connected to the discord server :param player: (default: None) when", "Might be changed in the future to log those problems to the minqlx.logger", "map name, which player to kick, etc. 
\"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name) if caller", "\" *(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \" *(to specs)*\"}", "chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the", "topic of the channel \"\"\" channel = self.discord.get_channel(channel_id) if channel is None: return", "prefix * qlx_discordEnableHelp (default: \"1\") indicates whether the bot will respond to !help", "info on a command.\".format(self.clean_prefix, command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\"", "that connected :param reason: the reason why the player left \"\"\" if reason", "= author self.discord_channel = discord_channel super().__init__(name=\"Discord-{}\".format(author.display_name)) @property def steam_id(self): return minqlx.owner() @property def", "have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return # User has reached maximum auth attempts,", "discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default: \"^\\!s$, ^\\!p$\") comma separated", "command for authenticated users to execute server commands from discord * qlx_discordLogToSeparateLogfile (default:", "channel: the chnannel the message was sent to \"\"\" handled_channels = {\"chat\": \"\",", "relaying team chat messages. 
* qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel", "that shall be escaped for discord chat channels \"\"\" escaped_text = text.replace('_', r'\\_')", "came from Gelenkbusfahrer and roast <https://github.com/roasticle/minqlx-plugins/blob/master/discordbot.py> and have been mainly discussed on the", "async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class to", "player.tell function to relay messages to discord :param msg: the msg to send", "self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set))", "that should not be sent from quake live to discord * qlx_discordReplaceMentionsForRelayedMessages (default:", "topics to indicate reveal more data about the server and its current game.", "in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token)) def initialize_bot(self, discord_bot): \"\"\" initializes a discord bot with", "on_ready(self): \"\"\" Function called once the bot connected. Mainly displays status update from", "the server. \"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for the SimpleAsyncDiscord client", "alternatives = \"\" for alternative_member in member: alternatives += \"@{} \".format(alternative_member.name) player.tell(alternatives) return", "that is happening is forwarded to the other system, and some basic Quake", "msg): \"\"\" Checks whether the given message should be filtered and not be", "the future to log those problems to the minqlx.logger \"\"\" pass def _topic_updater(self):", "channel_ids: the ids of the channels the topic should be set upon. :param", "a variant to not interfere with discord's formattings.) 
:param text: the text that", "is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False, users=True, roles=True)), loop=self.discord.loop) def relay_chat_message(self, player, channel,", "direct channel name match case-sensitive first channel = [ch for ch in channel_list", "use {}{} to execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3", "Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a player connects.", "self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name], Plugin.clean_text(msg)) return self.discord.relay_chat_message(player, handled_channels[channel.name],", "with portions of the channel name channel = [ch for ch in channel_list", "for the help output. \"\"\" command_name = self.context.invoked_with return \"Type {0}{1} command for", "self.discord.relay_message(content) def handle_vote_started(self, caller, vote, args): \"\"\" Handler called when a vote was", "by their score \"\"\" if len(player_list) == 0: return \"\" players_by_score = sorted(player_list,", "SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention) return", "top5_players)) def cmd_discord(self, player: minqlx.Player, msg, channel): \"\"\" Handler of the !discord command.", "Updates the topic on the given channels and keeps the topic suffix intact", "live will be prefixed with this prefix * qlx_discordEnableHelp (default: \"1\") indicates whether", "player) message = self.replace_channel_mentions(message, player) if self.discord_triggered_channel_message_prefix is not None and \\ self.discord_triggered_channel_message_prefix", "escaped_text @minqlx.delay(3) 
def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler called when a player", "vote was started. The method sends a corresponding message to the discord relay", "if len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\") channel.reply(\"Disconnecting from", "self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\") self.discord_exec_prefix = Plugin.get_cvar(\"qlx_discordExecPrefix\") extended_logging_enabled = Plugin.get_cvar(\"qlx_discordLogToSeparateLogfile\", bool)", "set the topic in its own thread to avoid blocking of the server", "relay a message to the given channel :param player: the player that originally", "== 1: return member[0] # then try a direct match at the user's", "channel.id in self.discord_relay_channel_ids: return \"{0} ^6{1}^7:^2 {2}\".format(self.discord_message_prefix, sender, content) return \"{0} ^5#{1.name} ^6{2}^7:^2", "the message that was sent :param channel: the chnannel the message was sent", "that initiated the vote :param vote: the vote itself, i.e. map change, kick", "self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_command(\"discord\", self.cmd_discord, usage=\"<message>\") self.add_command(\"discordbot\",", "for relaying team chat messages. 
* qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of", "the plugin's version information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async def version(self,", "towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of", "of seconds between automatic topic updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of", "\"\"\" Checks whether the given message should be filtered and not be sent", "the topic on :param topic: the topic to set on the given channels", "\"\"\" Handler called when a vote was started. The method sends a corresponding", "ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format the", "\"\"\" if len(msg) > 2 or (len(msg) == 2 and msg[1] not in", "self.logger = logger self.discord = None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token", "for reconnecting the discord bot to discord in case it gets disconnected. :param", "context the trigger happened in :param qlx_command: the command that was sent by", "topic): \"\"\" Set the topic on a set of channel_ids on discord provided.", "in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe( channel.send(content, allowed_mentions=AllowedMentions(everyone=False,", "version_information self.logger = logger self.discord = None self.authed_discord_ids = set() self.auth_attempts = {}", "message to a set of channel_ids on discord provided. 
:param channel_ids: the ids", "return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return \"Warmup\" @staticmethod def player_data():", "that team by their score \"\"\" if len(player_list) == 0: return \"\" players_by_score", "private message * qlx_discordExecPrefix (default: \"qlx\") command for authenticated users to execute server", ":param reason: the reason why the player left \"\"\" if reason in [\"disconnected\",", "of regular expressions for messages that should not be sent from quake live", "messages from discord via private message to the bot :param ctx: the context", "\"\"\" Relays a message from the triggered channels to minqlx :param ctx: the", "== 0: return # send the message in its own thread to avoid", "is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\")", "+= \"@{} \".format(alternative_member.name) player.tell(alternatives) return None def replace_channel_mentions(self, message, player=None): \"\"\" replaces a", "Comma separated list of channel ids where the topic suffix will be kept", "command for authenticating a discord user to the plugin via private message *", "therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken (default: \"\") The", "replace the channel mentions in :param player: (default: None) when several alternatives are", "discord network take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. 
As of version 1.5 of the", "escape_text_for_discord(text): \"\"\" Escapes the provided player's name for proper formatting to discord (i.e.", "channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) == 2 and msg[1] == \"disconnect\":", "= Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool)", "version of a discord plugin: * full relay between Quake Live chat and", "Checks whether a message was either sent in a configured relay or triggered", "player that originally sent the message :param channel: the channel the original message", "name or portions of the nick, if set member = [user for user", "(user.nick is not None and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return", "self.discord_replace_relayed_mentions or self.discord_replace_triggered_mentions intents = discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False,", "let's tell the player about this. if len(channel) > 1 and player is", "handle_plugin_unload(self, plugin): \"\"\" Handler when a plugin is unloaded to make sure, that", "logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler = RotatingFileHandler(file_path, encoding=\"utf-8\", maxBytes=maxlogsize, backupCount=maxlogs) file_handler.setLevel(logging.DEBUG)", "if game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score < 0:", "channels. \"\"\" game = self.game if game is None: return topic = mydiscordbot.game_status_information(game)", "can leave it unchecked. 
By default, this will be enabled and therefore mandatory.", "command_name) async def send_error_message(self, error): pass class DiscordChannel(minqlx.AbstractChannel): \"\"\" a minqlx channel class", "\"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def handle_map(self, mapname, factory): \"\"\" Handler called when a", "one are found \"\"\" # try a direct channel name match case-sensitive first", "len(member) == 1: return member[0] # then try a direct match at the", "* qlx_discordEnableHelp (default: \"1\") indicates whether the bot will respond to !help or", "the game to derive the status information from :return: the topic that represents", "return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx): \"\"\" Triggers game", "to create a relay chat between the Quake Live chat and configured discord", "ch.name.lower() == match.lower()] if len(channel) == 1: return channel[0] # then we try", "mainly updates the topics of the relay channels and the triggered channels (when", "to the discord bot. * qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord", "Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get the actual cvar values from the server self.discord_message_filters =", "full relay between Quake Live chat and discord, where every text message that", "filtered and not be sent to discord. :param msg: the message to check", "vote itself, i.e. map change, kick player, etc. 
:param args: any arguments of", "in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str", "help=\"display the plugin's version information\")) def reply_to_context(self, ctx, message): return ctx.send(message) async def", "self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self):", "topic suffix will be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305) Amount of", "Live. \"\"\" sender = author.name if author.nick is not None: sender = author.nick", "broadcast channel, and specific messages from another channel. For a description on how", "Station server(s). You need to install discord.py in your python installation, i.e. python3", "player: (default: None) when several alternatives are found for the mentions used, this", "specific messages between discord and Quake Live chat where a prefix needs to", "from minqlx import Plugin import discord from discord import ChannelType, AllowedMentions from discord.ext.commands", "where the topic suffix will be kept upon updating. * qlx_discordUpdateTopicInterval (default: 305)", "> 0: await self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return", "\"\"\" Send a message to a set of channel_ids on discord provided. :param", "expression will make sure that the \"@user\" has at least three characters, and", "ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format the channel, author, and content", "#channel) for triggered messages sent towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\")", "will be enabled and therefore mandatory. 
Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: *", "multiple servers on the same host with the same discord connected to. *", "self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version)", "\"\"\" return ctx.message.channel.id in self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\" Relays", "Plugin.teams() if len(teams['red']) > 0: player_data += \"\\n**R:** {}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0:", "show the usage help text. if len(msg) < 2: return minqlx.RET_USAGE self.discord.triggered_message(player, Plugin.clean_text(\"", "string_set: if item == '': continue value = int(item) int_set.add(value) return int_set def", "message to send to minqlx \"\"\" prefix_length = len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel,", "try to match portions of the name or portions of the nick, if", "a message so that it will be displayed nicely in the Quake Live", "substitutions will happen. :return: the original message replaced by properly formatted channel mentions", "channels the message should be sent to. :param content: the content of the", "for the SimpleAsyncDiscord client the discord bot runs in. :param version_information: the plugin's", "channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match),", "= topic # preserve the original channel's topic. 
position = previous_topic.find(topic_ending) topic_suffix =", "the topic on the triggered channels self.set_topic_on_discord_channels({channel_id}, \"{}{}\".format(topic, topic_suffix)) def get_channel_topic(self, channel_id): \"\"\"", "not channel_ids or len(channel_ids) == 0: return # send the message in its", "handled_channels[channel.name], Plugin.clean_text(msg)) @minqlx.delay(3) def handle_player_connect(self, player: minqlx.Player): \"\"\" Handler called when a player", "it gets disconnected. :param player: the player that send to the trigger :param", "None: player.tell(\"Found ^6{}^7 matching discord users for @{}:\".format(len(member), match)) alternatives = \"\" for", "# we found more than one matching channel, let's tell the player about", "Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic = \\", "channel_list: the list of channels connected to the discord server :param player: (default:", "sent for authentication :param password: the password to authenticate \"\"\" if password ==", "the author of the original message. :param content: the message itself, ideally taken", "configured. :param player: the player that connected :param reason: the reason why the", "send the message in its own thread to avoid blocking of the server", "relay in this basic version of a discord plugin: * full relay between", "discord. 
:param player: the player that sent the message :param msg: the message", "bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False,", "else \"The server\" content = \"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args)))", "\"\"\" if plugin == self.__class__.__name__: self.discord.stop() @staticmethod def game_status_information(game: minqlx.Game): \"\"\" Generate the", "trigger channels, when configured. :param player: the player that connected \"\"\" content =", "\"\") Plugin.set_cvar_once(\"qlx_discordRelayTeamchatChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordTriggeredChatMessagePrefix\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicOnTriggeredChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordKeepTopicSuffixChannelIds\", \"\") Plugin.set_cvar_once(\"qlx_discordUpdateTopicInterval\", \"305\")", "alternatives are. None is returned in that case. 
:return: the matching member, or", "*message: str): \"\"\" Relays a message from the triggered channels to minqlx :param", "a case-insensitive direct match with the channel name channel = [ch for ch", "Plugin.set_cvar_once(\"qlx_discordEnableHelp\", \"1\") Plugin.set_cvar_once(\"qlx_discordEnableVersion\", \"1\") Plugin.set_cvar_once(\"qlx_displayChannelForDiscordRelayChannels\", \"1\") Plugin.set_cvar_once(\"qlx_discordQuakeRelayMessageFilters\", r\"^\\!s$, ^\\!p$\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForRelayedMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\")", "self.discord_triggered_channel_ids async def triggered_chat(self, ctx, *message: str): \"\"\" Relays a message from the", "if set member = [user for user in member_list if user.name.lower().find(match.lower()) != -1", "loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect", "taken from message.clean_content to avoid ids of mentioned users and channels on the", "return False def handle_ql_chat(self, player: minqlx.Player, msg, channel: minqlx.AbstractChannel): \"\"\" Handler function for", "for proper formatting to discord (i.e. 
replace '*' (asterisks) with a variant to", "be barred from authentication for {} seconds.\" .format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay,", "space or at the beginning of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\")", "= eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix = Plugin.get_cvar(\"qlx_discordCommandPrefix\") self.discord_help_enabled = Plugin.get_cvar(\"qlx_discordEnableHelp\", bool)", "happened in \"\"\" await self.reply_to_context(ctx, \"```{}```\".format(self.version_information)) def is_private_message(self, ctx): \"\"\" Checks whether a", "disconnected. :param player: the player that send to the trigger :param msg: the", "the minqlx plugin's bot to provide help information. This is a customized variation", "ctx: the context the trigger happened in :param message: the message to send", "Quake Live. \"\"\" sender = author.name if author.nick is not None: sender =", "match] if len(channel) == 1: return channel[0] # then try a case-insensitive direct", "message came from. :param author: the author of the original message. :param content:", "def __repr__(self): return \"{} {}\".format(str(self), self.author.display_name) def reply(self, msg): \"\"\" overwrites the channel.reply", "thread is started. We will set up the bot here with the right", "mentions \"\"\" if not self.is_discord_logged_in(): return message returned_message = message # this regular", "messages and setting of channel topics. :param game: the game object to derive", "to use single quotes for the suffixes. 
* qlx_discordCommandPrefix (default: \"!\") Command prefix", "and used in topics to indicate reveal more data about the server and", "\"@user\" has at least three characters, and is either # prefixed by a", "the server \"\"\" player_data = \"\" teams = Plugin.teams() if len(teams['red']) > 0:", "how to set up a bot for you discord network take a look", "= escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self, player: minqlx.Player, reason): \"\"\" Handler", "to Discord...\") self.connect_discord() return if len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting", "a plugin created by ShiN0 Copyright (c) 2017 ShiN0 <https://www.github.com/mgaertne/minqlx-plugin-tests> You are free", "\"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0", "sorted(player_list, key=lambda k: k.score, reverse=True) if limit: players_by_score = players_by_score[:limit] team_data = \"\"", "set of channels to update the topic on :param topic: the topic to", "line, you may need to change the topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)`", "async def auth(self, ctx, password: str): \"\"\" Handles the authentication to the bot", "current game state on triggered relay channels. Your bot needs edit_channel permission for", "and sets the bot to playing Quake Live on discord. \"\"\" self.logger.info(\"Logged in", "user.name.lower().find(match.lower()) != -1 or (user.nick is not None and user.nick.lower().find(match.lower()) != -1)] if", "to relay messages to discord \"\"\" def __init__(self, client, author, discord_channel): self.client =", "player about this. 
if len(member) > 1 and player is not None: player.tell(\"Found", "dm_typing=False) # init the bot, and init the main discord interactions if self.discord_help_enabled:", "import RotatingFileHandler import minqlx from minqlx import Plugin import discord from discord import", "or discord happen. :param message: the message that was sent. \"\"\" # guard", "members connected to the discord server :param player: (default: None) when several alternatives", "channel: minqlx.AbstractChannel): \"\"\" Handler function for all chat messages on the server. This", "the mydiscordbot, you also need to enable the Server Members Intent for the", "As of version 1.5 of the mydiscordbot, you also need to enable the", "not None and user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return list(member)[0] #", "the match to look for in the channel name :param channel_list: the list", "runs in. :param version_information: the plugin's version_information string :param logger: the logger used", "to the discord triggered relay channels. 
:param player: the player that send to", "happened in \"\"\" return ctx.message.channel.id in self.discord_relay_channel_ids | self.discord_triggered_channel_ids async def trigger_status(self, ctx):", "forwarded to the other system, and some basic Quake Live status updates are", "for authentication :param password: the password to authenticate \"\"\" if password == self.discord_admin_password:", "in self.discord.get_all_members()] matches = matcher.findall(returned_message) for match in sorted(matches, key=lambda user_match: len(user_match), reverse=True):", "Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command = Plugin.get_cvar(\"qlx_discordAuthCommand\")", "in :param player: (default: None) when several alternatives are found for the mentions", "await self.reply_to_context(ctx, \"You have been successfully authenticated. \" \"You can now use {}{}", "(default: \"\") Prefix any triggered message from QL with this text portion. Useful", "all the relay and all the triggered channels :param topic: the topic to", "\"\"\" discord_bot.add_command(Command(self.auth, name=self.discord_auth_command, checks=[self.is_private_message, lambda ctx: not self.is_authed(ctx), lambda ctx: not self.is_barred_from_auth(ctx)], hidden=True,", "0: await self.reply_to_context(ctx, \"Wrong password. You have {} attempts left.\" .format(self.auth_attempts[ctx.message.author.id])) return #", "in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True return False def handle_ql_chat(self,", "message: the message that was sent. 
\"\"\" # guard clause to avoid None", "the topic of the provided channel id :param channel_id: the id of the", "beginning of the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for", "1: return channel[0] # then we try a match with portions of the", "for the ambiguous substitutions will happen. :return: the original message replaced by properly", "[game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score < 0: return \"Match ended:", "the server for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None:", "the ambiguous substitutions will happen. :return: the original message replaced by properly formatted", "Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self,", "str): \"\"\" Relays a message from the triggered channels to minqlx :param ctx:", "qlx_command: the command that was sent by the user \"\"\" @minqlx.next_frame def f():", "originate in a configured triggered channel :param ctx: the context the trigger happened", "the new map :param factory: the map factory used \"\"\" content = \"*Changing", "self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def is_message_in_triggered_channel(self, ctx): \"\"\"", "channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids", "position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if position != -1 else", "starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score <", "def cmd_discord(self, player: minqlx.Player, 
msg, channel): \"\"\" Handler of the !discord command. Forwards", "first member = [user for user in member_list if user.name.lower() == match.lower()] if", "name first member = [user for user in member_list if user.name.lower() == match.lower()]", "provide help information. This is a customized variation of discord.py's :class:`DefaultHelpCommand`. \"\"\" def", "# get the actual cvar values from the server self.discord_message_filters = Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set)", "\"\"\" Escapes the provided player's name for proper formatting to discord (i.e. replace", "quotes for the suffixes. * qlx_discordCommandPrefix (default: \"!\") Command prefix for all commands", "Plugin.get_cvar(\"qlx_discordEnableHelp\", bool) self.discord_version_enabled = Plugin.get_cvar(\"qlx_discordEnableVersion\", bool) self.discord_trigger_status = Plugin.get_cvar(\"qlx_discordTriggerStatus\") self.discord_message_prefix = Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names", "of channel ids for full relay. * qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list", "to connect to discord. * qlx_discordRelayChannelIds (default: \"\") Comma separated list of channel", "remote admin of the server via discord private messages to the discord bot.", "logging.getLogger(\"discord\") discordLogger.setLevel(logging.DEBUG) # File file_path = os.path.join(minqlx.get_cvar(\"fs_homepath\"), \"minqlx_discord.log\") maxlogs = minqlx.Plugin.get_cvar(\"qlx_logs\", int) maxlogsize", "the trigger channels, when configured. :param player: the player that connected \"\"\" content", "on the same host with the same discord connected to. 
* qlx_discordUpdateTopicOnTriggeredChannels (default:", "the original message sent for authentication :param password: the password to authenticate \"\"\"", "topic): \"\"\" Helper function to update the topics on all the relay and", "if not self.discord_triggered_channel_ids: return if self.discord_replace_triggered_mentions: message = self.replace_user_mentions(message, player) message = self.replace_channel_mentions(message,", "(default: \"1\") display the channel name of the discord channel for configured relay", "channel name of the discord channel for configured relay channels * qlx_discordQuakeRelayMessageFilters (default:", "vote, i.e. map name, which player to kick, etc. \"\"\" caller_name = mydiscordbot.escape_text_for_discord(caller.clean_name)", "# if there are not triggered relay channels configured, do nothing. if not", "change anything on the next line, you may need to change the topic_ending", "message.clean_content if len(content) > 0: minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(message.channel, message.author, content)) async def on_command_error(self, exception,", "be enabled and therefore mandatory. Check <https://discordpy.readthedocs.io/en/latest/intents.html#privileged-intents> for a description. Uses: * qlx_discordBotToken", "({2}) with **{3}/{4}** players. \".format(ginfo, Plugin.clean_text(maptitle), gametype, num_players, max_players) @staticmethod def get_game_info(game): \"\"\"", "def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called when a vote was", "msg: the message to check whether it should be filtered :return whether the", "ctx.message.channel)) except Exception as e: send_message = ctx.send(\"{}: {}\".format(e.__class__.__name__, e)) asyncio.run_coroutine_threadsafe(send_message, loop=ctx.bot.loop) minqlx.log_exception()", "the original channel's topic. 
position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position + len(topic_ending):] if", "None: return None return channel.topic def stop(self): \"\"\" stops the discord client \"\"\"", "\"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a minqlx dummy player class to relay", "message should be sent to. :param content: the content of the message to", "processing. if not message: return # if the bot sent the message himself,", "channel.reply(\"Disconnecting from Discord...\") self.disconnect_discord() return if len(msg) == 2 and msg[1] == \"reconnect\":", "and its interactions on the discord server if discord_client is None: self.discord =", "nick :param member_list: the list of members connected to the discord server :param", "i.e. you did configured and of the qlx_discordReplaceMentions cvars as '0', you can", "for channel_id in channel_ids: channel = self.discord.get_channel(channel_id) if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic),", "self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message, player=None): \"\"\" replaces a mentioned discord user (indicated", "channel_ids, do nothing. if not channel_ids or len(channel_ids) == 0: return # send", "if game.state == \"in_progress\": return \"Match in progress: **{}** - **{}**\".format(game.red_score, game.blue_score) return", "Plugin.get_cvar(\"qlx_discordQuakeRelayMessageFilters\", set) # adding general plugin hooks self.add_hook(\"unload\", self.handle_plugin_unload) self.add_hook(\"chat\", self.handle_ql_chat, priority=minqlx.PRI_LOWEST) self.add_hook(\"player_connect\",", "i.e. map change, kick player, etc. :param args: any arguments of the vote,", "all relay channels. \"\"\" game = self.game if game is None: return topic", "plugin's bot to provide help information. 
This is a customized variation of discord.py's", "bot runs in. :param version_information: the plugin's version_information string :param logger: the logger", "self.discord_update_triggered_channels_topic = \\ Plugin.get_cvar(\"qlx_discordUpdateTopicOnTriggeredChannels\", bool) self.discord_topic_update_interval = Plugin.get_cvar(\"qlx_discordUpdateTopicInterval\", int) self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\",", "def find_channel_that_matches(match, channel_list, player=None): \"\"\" find a channel that matches the given match", ":param player: the player that sent the message :param msg: the message that", "interfere with discord's formattings.) :param text: the text that shall be escaped for", "by their score :param player_list: the list of players to generate the team", "command that was sent by the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input(", "# CAUTION: if you change anything on the next line, you may need", "its own thread to avoid blocking of the server for channel_id in channel_ids:", "the trigger channels, when configured. 
:param mapname: the new map :param factory: the", "this regular expression will make sure that the \"#channel\" has at least three", "@minqlx.thread def connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in():", "SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client self.logger.info(\"Connecting to Discord...\") self.discord.start() self.logger.info(self.version_information()) Plugin.msg(self.version_information()) def", "def replace_channel_mentions(self, message, player=None): \"\"\" replaces a mentioned discord channel (indicated by #channel-hint", "channel_list if ch.name == match] if len(channel) == 1: return channel[0] # then", "it should be filtered :return whether the message should not be relayed to", "Quake Live server\")) discord_bot.add_listener(self.on_ready) discord_bot.add_listener(self.on_message) if self.discord_version_enabled: discord_bot.add_command(Command(self.version, name=\"version\", pass_context=True, ignore_extra=False, help=\"display the", "escaped_text = text.replace('_', r'\\_') escaped_text = escaped_text.replace('*', r\"\\*\") return escaped_text @minqlx.delay(3) def handle_player_disconnect(self,", "connection time to the server \"\"\" player_data = \"\" teams = Plugin.teams() if", "= minqlx.Plugin.get_cvar(\"qlx_logsSize\", int) file_fmt = logging.Formatter(\"(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s\", \"%H:%M:%S\") file_handler =", "msg: the message to send to the relay channel \"\"\" self.send_to_discord_channels(self.discord_relay_channel_ids, msg) def", "out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason) else: reason_str = \"was kicked", "def __init__(self): super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for the help", "& 
self.discord_keep_topic_suffix_channel_ids, topic) def set_topic_on_discord_channels(self, channel_ids, topic): \"\"\" Set the topic on a", "that was sent. \"\"\" # guard clause to avoid None messages from processing.", "be able to replace discord user mentions. If you don't need that, i.e.", "await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You will be barred from authentication", "content = \"_{} called a vote: {} {}_\".format(caller_name, vote, mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self,", "user.nick.lower().find(match.lower()) != -1)] if len(member) == 1: return list(member)[0] # we found more", "in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel", "if member is not None: returned_message = returned_message.replace(\"@{}\".format(match), member.mention) return returned_message @staticmethod def", "\".format(mydiscordbot.escape_text_for_discord(player.clean_name), player.score) return team_data def is_filtered_message(self, msg): \"\"\" Checks whether the given message", "Message prefix for the trigger on triggered relay channels. * qlx_discordTriggerStatus (default: \"status\")", "ch.name.lower().find(match.lower()) != -1] if len(channel) == 1: return channel[0] # we found more", "be forwarded. These two modes can be combined, i.e. full relay to a", "will forward and messages on the Quake Live server to discord. :param player:", "information from :return: the topic that represents the current game state. \"\"\" ginfo", "to be forwarded. These two modes can be combined, i.e. 
full relay to", "are two basic types of relay in this basic version of a discord", "Plugin.get_cvar(\"qlx_discordMessagePrefix\") self.discord_show_relay_channel_names = Plugin.get_cvar(\"qlx_displayChannelForDiscordRelayChannels\", bool) self.discord_replace_relayed_mentions = Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\",", "discord server. :return: the formatted message that may be sent back to Quake", "def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return topic = mydiscordbot.game_status_information(game) self.update_topics_on_relay_and_triggered_channels(topic)", "context the trigger happened in \"\"\" try: game = minqlx.Game() ginfo = mydiscordbot.get_game_info(game)", "the relay and all the triggered channels :param topic: the topic to set", "set on all the channels \"\"\" if not self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids", "server if discord_client is None: self.discord = SimpleAsyncDiscord(self.version_information(), self.logger) else: self.discord = discord_client", "len(\"{}{} \".format(ctx.prefix, ctx.invoked_with)) minqlx.CHAT_CHANNEL.reply( self._format_message_to_quake(ctx.message.channel, ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\"", "def version_information(self): return \"{} Version: {}\".format(self.name, plugin_version) def handle_plugin_unload(self, plugin): \"\"\" Handler when", "topic_ending logic in # :func:`mydiscordbot.update_topic_on_triggered_channels(self, topic)` to keep the right portion # of", "> 1 and player is not None: player.tell(\"Found ^6{}^7 matching discord channels for", "mapname: the new map :param factory: the map factory used \"\"\" content =", "self.discord.is_ready() def 
update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on the given channels", "not None: player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives = \"\"", "a player connects. The method sends a corresponding message to the discord relay", "self.handle_player_disconnect, priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end,", "vote that passed or failed, i.e. map change, kick player, etc. :param args:", "usage=\"[status]|connect|disconnect|reconnect\") # initialize the discord bot and its interactions on the discord server", "other system, and some basic Quake Live status updates are send to discord", "else: reason_str = \"was kicked ({}).\".format(mydiscordbot.escape_text_for_discord(Plugin.clean_text(reason))) content = \"_{} {}_\".format(mydiscordbot.escape_text_for_discord(player.clean_name), reason_str) self.discord.relay_message(content) def", "to the discord relay channels. and updates the relay channel topic as well", "logging, usually passed through from the minqlx plugin. \"\"\" super().__init__() self.version_information = version_information", "ctx: the context the trigger happened in \"\"\" return ctx.message.author.id in self.authed_discord_ids def", "user's discord id to authenticate. if ctx.message.author.id not in self.auth_attempts: self.auth_attempts[ctx.message.author.id] = 3", "was sent by the user \"\"\" @minqlx.next_frame def f(): try: minqlx.COMMANDS.handle_input( DiscordDummyPlayer(self, ctx.message.author,", "originating channel :param ctx: the context the trigger happened in \"\"\" try: game", "relay channels. 
:param caller: the player that initiated the vote :param vote: the", "will respond to !help or responses are completely switched off * qlx_discordEnableVersion (default:", "= SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player) if channel is not None: returned_message = returned_message.replace(\"#{}\".format(match), channel.mention)", "for logging, usually passed through from the minqlx plugin. \"\"\" super().__init__() self.version_information =", "* qlx_discordReplaceMentionsForTriggeredMessages (default: \"1\") replace mentions (@user and #channel) for triggered messages sent", "in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick is not None and user.nick.lower().find(match.lower())", "\"\"\" A help formatter for the minqlx plugin's bot to provide help information.", "in handled_channels: return if self.is_filtered_message(msg): return if channel.name in [\"red_team_chat\", \"blue_team_chat\"]: self.discord.relay_team_chat_message(player, handled_channels[channel.name],", "of channel ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\") Prefix any triggered", "well as the trigger channels, when configured. :param player: the player that connected", "mydiscordbot.escape_text_for_discord(Plugin.clean_text(args))) self.discord.relay_message(content) def handle_vote_ended(self, votes, vote, args, passed): \"\"\" Handler called when a", "update_topic_on_channels_and_keep_channel_suffix(self, channel_ids, topic): \"\"\" Updates the topic on the given channels and keeps", "discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the discord library (logs", "-1 else previous_topic if channel_id in self.discord_kept_topic_suffixes: topic_suffix = self.discord_kept_topic_suffixes[channel_id] # update the", "i.e. about to start. 
This function mainly updates the topics of the relay", ".format(bar_delay)) def f(): del self.auth_attempts[ctx.message.author.id] threading.Timer(bar_delay, f).start() async def qlx(self, ctx, *qlx_command: str):", "discord. \"\"\" self.logger.info(\"Logged in to discord as: {} ({})\".format(self.discord.user.name, self.discord.user.id)) Plugin.msg(\"Connected to discord\")", "Comma separated list of channel ids for triggered relay. * qlx_discordTriggeredChatMessagePrefix (default: \"\")", "= Plugin.get_cvar(\"qlx_discordReplaceMentionsForRelayedMessages\", bool) self.discord_replace_triggered_mentions = \\ Plugin.get_cvar(\"qlx_discordReplaceMentionsForTriggeredMessages\", bool) self.discord_admin_password = Plugin.get_cvar(\"<PASSWORD>AdminPassword\") self.discord_auth_command =", "= logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(console_fmt) discordLogger.addHandler(console_handler) @staticmethod def int_set(string_set): int_set = set() for item", "mentioned discord user (indicated by @user-hint with a real mention :param message: the", "to Discord...\") channel.reply(\"Connecting to Discord...\") self.connect_discord() return if len(msg) == 2 and msg[1]", "message. 
:param content: the message itself, ideally taken from message.clean_content to avoid ids", "discord.Intents(members=members_intent, guilds=True, bans=False, emojis=False, integrations=False, webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False,", "channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote admin of the server via", "alternatives = \"\" for alternative_channel in channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return", "return \"Type {0}{1} command for more info on a command.\".format(self.clean_prefix, command_name) async def", "= topic[-10:] for channel_id in channel_ids: previous_topic = self.get_channel_topic(channel_id) if previous_topic is None:", "as well as the trigger channels, when configured. :param player: the player that", "commands from discord * qlx_discordLogToSeparateLogfile (default: \"0\") enables extended logging for the discord", "message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\" a", "# set the topic in its own thread to avoid blocking of the", "self.is_discord_logged_in(): return \"Discord connection up and running.\" return \"Discord client not connected.\" def", "indicates whether the bot will respond to !help or responses are completely switched", "the given channels and keeps the topic suffix intact on the configured channels", "the bot sent the message himself, do nothing. if message.author == self.discord.user: return", "messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False) # init the bot,", "don't need that, i.e. 
you did configured and of the qlx_discordReplaceMentions cvars as", "* qlx_discordRelayTeamchatChannelIds (default: \"\") Comma separated list of channel ids for relaying team", "self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids = SimpleAsyncDiscord.int_set(Plugin.get_cvar(\"qlx_discordRelayChannelIds\", set)) self.discord_relay_team_chat_channel_ids = SimpleAsyncDiscord.int_set(", "init the bot, and init the main discord interactions if self.discord_help_enabled: self.discord =", "the server\")) discord_bot.add_command(Command(self.trigger_status, name=self.discord_trigger_status, checks=[self.is_message_in_relay_or_triggered_channel], pass_context=True, ignore_extra=False, help=\"display current game status information\")) discord_bot.add_command(Command(self.triggered_chat,", "minqlx.NonexistentGameError: reply = \"Currently no game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix,", "or (len(msg) == 2 and msg[1] not in [\"status\", \"connect\", \"disconnect\", \"reconnect\"]): return", "the topic from :return: the topic of the channel \"\"\" channel = self.discord.get_channel(channel_id)", "characters, and is either # prefixed by a space or at the beginning", "\"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score", "reason): \"\"\" Handler called when a player disconnects. 
The method sends a corresponding", "for status messages and used in topics to indicate reveal more data about", "# init the bot, and init the main discord interactions if self.discord_help_enabled: self.discord", "if self.discord_help_enabled: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=MinqlxHelpCommand(), loop=loop, intents=intents) else: self.discord = Bot(command_prefix=self.discord_command_prefix,", "authentication to the bot to admin the server. \"\"\" def __init__(self, version_information, logger):", "game running.\" if self.is_message_in_triggered_channel(ctx): reply = \"{0} {1}\".format(self.discord_triggered_channel_message_prefix, reply) await self.reply_to_context(ctx, reply) def", "channel: alternatives += \"#{} \".format(alternative_channel.name) player.tell(alternatives) return None def triggered_message(self, player, message): \"\"\"", "player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message):", "= re.compile(message_filter) if matcher.match(msg): return True return False def handle_ql_chat(self, player: minqlx.Player, msg,", "configured. 
:param mapname: the new map :param factory: the map factory used \"\"\"", "discord from discord import ChannelType, AllowedMentions from discord.ext.commands import Bot, Command, DefaultHelpCommand import", "member = [user for user in member_list if user.name.lower().find(match.lower()) != -1 or (user.nick", "1 and player is not None: player.tell(\"Found ^6{}^7 matching discord channels for #{}:\".format(len(channel),", "2 and msg[1] == \"reconnect\": self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord()", "\"\"\" Handles exec messages from discord via private message to the bot :param", "handle_map(self, mapname, factory): \"\"\" Handler called when a map is changed. The method", "gametype = game.type_short.upper() # CAUTION: if you change anything on the next line,", "and sends a message to all relay channels. \"\"\" game = self.game if", "bot to admin the server. \"\"\" def __init__(self, version_information, logger): \"\"\" Constructor for", "msg: the msg to send to this player \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class SimpleAsyncDiscord(threading.Thread):", "self.is_discord_logged_in(): return if self.discord_update_triggered_channels_topic: topic_channel_ids = self.discord_relay_channel_ids | self.discord_triggered_channel_ids else: topic_channel_ids = self.discord_relay_channel_ids", "self.discord_keep_topic_suffix_channel_ids, topic) # keep the topic suffix on the channels that are configured", "channel.topic def stop(self): \"\"\" stops the discord client \"\"\" if self.discord is None:", "if reason in [\"disconnected\", \"timed out\", \"was kicked\", \"was kicked.\"]: reason_str = \"{}.\".format(reason)", "self.discord_keep_topic_suffix_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordKeepTopicSuffixChannelIds\", set)) self.discord_kept_topic_suffixes = 
eval(Plugin.get_cvar(\"qlx_discordKeptTopicSuffixes\", str)) self.discord_trigger_triggered_channel_chat = Plugin.get_cvar(\"qlx_discordTriggerTriggeredChannelChat\") self.discord_command_prefix", "{}\".format(mydiscordbot.team_data(teams['red'])) if len(teams['blue']) > 0: player_data += \"\\n**B:** {}\".format(mydiscordbot.team_data(teams['blue'])) return player_data @staticmethod def", "channels, and updates the relay channel topic as well as the trigger channels,", "i.e. map name, which player to kick, etc. :param passed: boolean indicating whether", "for match in sorted(matches, key=lambda channel_match: len(channel_match), reverse=True): channel = SimpleAsyncDiscord.find_channel_that_matches(match, channel_list, player)", "triggered message to the configured triggered_channel :param player: the player that originally sent", "[ch for ch in channel_list if ch.name == match] if len(channel) == 1:", "if channel is None: continue asyncio.run_coroutine_threadsafe(channel.edit(topic=topic), loop=self.discord.loop) def is_discord_logged_in(self): if self.discord is None:", "message_filter in self.discord_message_filters: matcher = re.compile(message_filter) if matcher.match(msg): return True return False def", "(indicated by #channel-hint with a real mention :param message: the message to replace", "== \"countdown\": return \"Match starting\" if game.roundlimit in [game.blue_score, game.red_score] or game.red_score <", "== 1: return channel[0] # we found more than one matching channel, let's", "super().__init__(no_category=\"minqlx Commands\") def get_ending_note(self): \"\"\" Provides the ending_note for the help output. 
\"\"\"", "the message to send to this channel \"\"\" self.client.send_to_discord_channels({self.discord_channel.id}, Plugin.clean_text(msg)) class DiscordDummyPlayer(minqlx.AbstractDummyPlayer): \"\"\"", "bot in the game console and server logfile, and sets the bot to", "the minqlx.logger \"\"\" pass def _topic_updater(self): try: game = minqlx.Game() except minqlx.NonexistentGameError: return", "responses are completely switched off * qlx_discordEnableVersion (default: \"1\") indicates whether the bot", "typing=False, guild_typing=False, dm_typing=False) # init the bot, and init the main discord interactions", "member = [user for user in member_list if user.nick is not None and", "game.blue_score < 0: return \"Match ended: **{}** - **{}**\".format(game.red_score, game.blue_score) if game.state ==", "is not None and \\ self.discord_triggered_channel_message_prefix != \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix,", "(300 seconds) bar_delay = 300 await self.reply_to_context(ctx, \"Maximum authentication attempts reached. \" \"You", "is currently barred from authentication to the bot :param ctx: the context the", "the given match :param match: the match to look for in the user", "Handler of the !discord command. Forwards any messages after !discord to the discord", "up and running.\" return \"Discord client not connected.\" def run(self): \"\"\" Called when", ":param message: the message to replace the channel mentions in :param player: (default:", "format the current game.state that may be used in status messages and setting", "status of the game server. 
* qlx_discordMessagePrefix (default: \"[DISCORD]\") messages from discord to", "\"1\") Plugin.set_cvar_once(\"qlx_discordReplaceMentionsForTriggeredMessages\", \"1\") Plugin.set_cvar_once(\"qlx_discordAdminPassword\", \"<PASSWORD>\") Plugin.set_cvar_once(\"qlx_discordAuthCommand\", \"auth\") Plugin.set_cvar_once(\"qlx_discordExecPrefix\", \"qlx\") Plugin.set_cvar_once(\"qlx_discordLogToSeparateLogfile\", \"0\") # get", "ctx.message.author, ctx.message.clean_content[prefix_length:])) def _format_message_to_quake(self, channel, author, content): \"\"\" Format the channel, author, and", "self.discord = None self.authed_discord_ids = set() self.auth_attempts = {} self.discord_bot_token = Plugin.get_cvar(\"qlx_discordBotToken\") self.discord_relay_channel_ids", "minqlx.Player, msg, channel): \"\"\" Handler for reconnecting the discord bot to discord in", "qlx_discordTriggeredChannelIds (default: \"\") Comma separated list of channel ids for triggered relay. *", "^6{}^7 matching discord channels for #{}:\".format(len(channel), match)) alternatives = \"\" for alternative_channel in", ":param topic: the topic to set on all the channels \"\"\" if not", "message # this regular expression will make sure that the \"@user\" has at", "in this basic version of a discord plugin: * full relay between Quake", "commands, and run the discord.py bot in a new event_loop until completed. \"\"\"", "the related suffixes. Make sure to use single quotes for the suffixes. *", "game.roundlimit in [game.blue_score, game.red_score] or game.red_score < 0 or game.blue_score < 0: return", "not self.discord.is_discord_logged_in(): return self.discord.stop() class MinqlxHelpCommand(DefaultHelpCommand): \"\"\" A help formatter for the minqlx", "self.authed_discord_ids def is_barred_from_auth(self, ctx): \"\"\" Checks whether an author is currently barred from", "a space or at the beginning of the string matcher = re.compile(\"(?:^| )#([^", "discord bot. 
* qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord user to", "\"\", \"red_team_chat\": \" *(to red team)*\", \"blue_team_chat\": \" *(to blue team)*\", \"spectator_chat\": \"", "int_set = set() for item in string_set: if item == '': continue value", "up the bot here with the right commands, and run the discord.py bot", "current top5 scorers with the scores and connection time to the server \"\"\"", "Trigger for having the bot send the current status of the game server.", "trigger_status(self, ctx): \"\"\" Triggers game status information sent towards the originating channel :param", "guard clause to avoid None messages from processing. if not message: return #", "closed when this plugin is unloaded. :param plugin: the plugin that was unloaded.", "* qlx_discordBotToken (default: \"\") The token of the discord bot to use to", "topics on all the relay and all the triggered channels :param topic: the", "of the game state \"\"\" if game.state == \"warmup\": return \"Warmup\" if game.state", "!= \"\": content = \"{} **{}**: {}\".format(self.discord_triggered_channel_message_prefix, mydiscordbot.escape_text_for_discord(player.clean_name), message) else: content = \"**{}**:", "return None def triggered_message(self, player, message): \"\"\" send a triggered message to the", "connect_discord(self): if self.discord.is_discord_logged_in(): return self.discord.run() @minqlx.thread def disconnect_discord(self): if not self.discord.is_discord_logged_in(): return self.discord.stop()", "topic # preserve the original channel's topic. 
position = previous_topic.find(topic_ending) topic_suffix = previous_topic[position", "whole name first member = [user for user in member_list if user.name.lower() ==", "self.logger.info(\"Reconnecting to Discord...\") channel.reply(\"Reconnecting to Discord...\") self.disconnect_discord() self.connect_discord() return channel.reply(self.discord.status()) return @minqlx.thread def", "messages sent towards the triggered channels * qlx_discordAdminPassword (default \"<PASSWORD>\") passwort for remote", "to start. This function mainly updates the topics of the relay channels and", "* qlx_discordAuthCommand (default: \"auth\") command for authenticating a discord user to the plugin", "else: self.discord = Bot(command_prefix=self.discord_command_prefix, description=\"{}\".format(self.version_information), help_command=None, loop=loop, intents=intents) self.initialize_bot(self.discord) # connect the now", "msg, channel): \"\"\" Handler of the !discord command. Forwards any messages after !discord", "No replacements for the ambiguous substitutions will happen. :return: the original message replaced", "priority=minqlx.PRI_LOWEST) self.add_hook(\"map\", self.handle_map) self.add_hook(\"vote_started\", self.handle_vote_started) self.add_hook(\"vote_ended\", self.handle_vote_ended) self.add_hook(\"game_countdown\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST) self.add_hook(\"game_end\", self.handle_game_countdown_or_end, priority=minqlx.PRI_LOWEST)", "the new topic that should be set. \"\"\" # if we were not", "take a look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of the mydiscordbot, you", "SimpleAsyncDiscord(threading.Thread): \"\"\" SimpleAsyncDiscord client which is used to communicate to discord, and provides", "qlx_discordReplaceMentions cvars as '0', you can leave it unchecked. 
By default, this will", "self.replace_channel_mentions(message, player) content = \"**{}**{}: {}\".format(mydiscordbot.escape_text_for_discord(player.clean_name), channel, message) self.send_to_discord_channels(self.discord_relay_team_chat_channel_ids, content) def replace_user_mentions(self, message,", "is forwarded to the other system, and some basic Quake Live status updates", "updates * qlx_discordKeptTopicSuffixes (default: {}) A dictionary of channel_ids for kept topic suffixes", "self.discord.triggered_message(player, Plugin.clean_text(\" \".join(msg[1:]))) self.msg(\"Message to Discord chat cast!\") def cmd_discordbot(self, player: minqlx.Player, msg,", "!= -1] if len(channel) == 1: return channel[0] # we found more than", "channels. * qlx_discordKeepTopicSuffixChannelIds (default: \"\") Comma separated list of channel ids where the", "the player about this. if len(channel) > 1 and player is not None:", "factory used \"\"\" content = \"*Changing map to {}...*\".format(mydiscordbot.escape_text_for_discord(mapname)) self.discord.relay_message(content) def handle_vote_started(self, caller,", "ctx.send(message) async def version(self, ctx): \"\"\" Triggers the plugin's version information sent to", "execute commands.\" .format(self.discord_command_prefix, self.discord_exec_prefix)) return # Allow up to 3 attempts for the", "ids for relaying team chat messages. * qlx_discordTriggeredChannelIds (default: \"\") Comma separated list", "if limit: players_by_score = players_by_score[:limit] team_data = \"\" for player in players_by_score: team_data", "ctx, *qlx_command: str): \"\"\" Handles exec messages from discord via private message to", "authentication attempts reached. 
\" \"You will be barred from authentication for {} seconds.\"", "message: the content of the message \"\"\" if self.discord_replace_relayed_mentions: message = self.replace_user_mentions(message, player)", "look `here <https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token>`. As of version 1.5 of the mydiscordbot, you also need", "return isinstance(ctx.message.channel, discord.DMChannel) def is_authed(self, ctx): \"\"\" Checks whether a user is authed", "be used for the messages to be forwarded. These two modes can be", "value may be used for status messages and used in topics to indicate", "messages after !discord to the discord triggered relay channels. :param player: the player", "message) self.relay_message(content) def relay_team_chat_message(self, player, channel, message): \"\"\" relay a team_chat message, that", "reply_to_context(self, ctx, message): return ctx.send(message) async def version(self, ctx): \"\"\" Triggers the plugin's", "mention :param message: the message to replace the user mentions in :param player:", "the string matcher = re.compile(\"(?:^| )@([^ ]{3,})\") member_list = [user for user in", "initiated the vote :param vote: the vote itself, i.e. map change, kick player,", "def stop(self): \"\"\" stops the discord client \"\"\" if self.discord is None: return", "of the !discord command. 
Forwards any messages after !discord to the discord triggered", "self.initialize_bot(self.discord) # connect the now configured bot to discord in the event_loop self.discord.loop.run_until_complete(self.discord.start(self.discord_bot_token))", "webhooks=False, invites=False, voice_states=False, presences=False, messages=True, guild_messages=True, dm_messages=True, reactions=False, guild_reactions=False, dm_reactions=False, typing=False, guild_typing=False, dm_typing=False)", "self.connect_discord() return if len(msg) == 2 and msg[1] == \"disconnect\": self.logger.info(\"Disconnecting from Discord...\")", "= SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordRelayTeamchatChannelIds\", set)) self.discord_triggered_channel_ids = SimpleAsyncDiscord.int_set( Plugin.get_cvar(\"qlx_discordTriggeredChannelIds\", set)) self.discord_triggered_channel_message_prefix = Plugin.get_cvar(\"qlx_discordTriggeredChatMessagePrefix\") self.discord_update_triggered_channels_topic", "player by their score :param player_list: the list of players to generate the", "as well as private authentication to the bot to admin the server. \"\"\"", "topic)` to keep the right portion # of the triggered relay channels' topics!", "name, which player to kick, etc. :param passed: boolean indicating whether the vote" ]
[ "\"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\")", "\" + str(value[1][0]) + \"\\n\") for tcnum in value[1][3]: ofile.write(str(tcnum) + \"\\n\") ofile.write(\"\\n\")", "start point of collecting words if len(sys.argv) <= 1: corpus_type = \"bow\" else:", "<max_words> specify the number of words the preprocessed file used # <startw> the", "TopicIO from nltk.corpus import wordnet as wn from nltk.corpus import reuters import os", "if len(sys.argv) <= 6: max_words = 250 else: max_words = int(sys.argv[6]) if len(sys.argv)", "calculation of topic coherence # <max_words> specify the number of words the preprocessed", "if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type =", "<= 5: words_count = 10 else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6:", "topic evaluation values tclist = [] te = WordNetEvaluator() for index, topic in", "= int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250 else: max_words = int(sys.argv[6])", "# read topics tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname =", "dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values tclist = []", "\" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \"", "enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list by a descending", "words the preprocessed file used # <startw> the start point of collecting words", "= int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\" else: src = sys.argv[3]", "corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type =", "the number of words the preprocessed file used # <startw> the start point", "= \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <= 4: tc = \"path\"", "= sys.argv[4] if len(sys.argv) <= 5: words_count = 10 else: words_count = 
int(sys.argv[5])", "a descending order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if", "else: max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0 else: startw", "10 else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250 else:", "words_count, ifname, startw=startw)]) # sort the list by a descending order tclist =", "elif sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type = \"bow\" if len(sys.argv)", "ifname, startw=startw)]) # sort the list by a descending order tclist = list(reversed(sorted(tclist,", "import WordNetEvaluator import sys import utils.name_convention as name from topic.topicio import TopicIO from", "for LDA # <wordnet method> default to path # <word count> the number", "4: tc = \"path\" else: tc = sys.argv[4] if len(sys.argv) <= 5: words_count", "tc = sys.argv[4] if len(sys.argv) <= 5: words_count = 10 else: words_count =", "specify the number of words the preprocessed file used # <startw> the start", "if len(sys.argv) <= 4: tc = \"path\" else: tc = sys.argv[4] if len(sys.argv)", "tclist = [] te = WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic,", "of words the preprocessed file used # <startw> the start point of collecting", "to bag of words. 
b for binary, t for tf-idf, anything else or", "which contains documents for LDA # <wordnet method> default to path # <word", "<reponame>Renata1995/Topic-Distance-and-Coherence<gh_stars>1-10 from coherence.wn import WordNetEvaluator import sys import utils.name_convention as name from topic.topicio", "= WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) #", "+ str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum in", "tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\")", "+ str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" +", "te = WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)])", "ifname = dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values tclist", "= list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname", "ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum", "nltk.corpus import reuters import os # # syntax: python coh_wn_read.py <corpus type> <#", "\"binary\" else: corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count = 3 else:", "= sys.argv[3] if len(sys.argv) <= 4: tc = \"path\" else: tc = sys.argv[4]", "read topics tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname", "# <wordnet method> default to path # <word count> the number of top", "in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list by a", "number of top words used in the calculation of topic coherence # <max_words>", "tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if 
not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc)", "coh_wn_read.py <corpus type> <# of topics> <src> <word count> # <corpus type> default", "\"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\") for value", "collecting words if len(sys.argv) <= 1: corpus_type = \"bow\" else: if sys.argv[1] ==", "tf-idf, anything else or missing for bag of words # <# of topics>", "topics_count, src) # read topics tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir())", "+ \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\") for", "syntax: python coh_wn_read.py <corpus type> <# of topics> <src> <word count> # <corpus", "of topics> <src> <word count> # <corpus type> default to bag of words.", "\"b\": corpus_type = \"binary\" else: corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count", "src = \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <= 4: tc =", "topic coherence # <max_words> specify the number of words the preprocessed file used", "= 3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\"", "descending order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if not", "of words # <# of topics> number of topics. default to 8 #", "evaluation values tclist = [] te = WordNetEvaluator() for index, topic in enumerate(tlist):", "default to 8 # <src> src folder which contains documents for LDA #", "LDA # <wordnet method> default to path # <word count> the number of", "\"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \"", "= int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0 else: startw = int(sys.argv[7])", "\".txt\" ofile = open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \" + str(value[0])", "words. 
b for binary, t for tf-idf, anything else or missing for bag", "open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean", "str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0])", "os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc + \"/w0\" + str(words_count)", "\"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\")", "max_words = 250 else: max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw =", "# syntax: python coh_wn_read.py <corpus type> <# of topics> <src> <word count> #", "utils.name_convention as name from topic.topicio import TopicIO from nltk.corpus import wordnet as wn", "<# of topics> <src> <word count> # <corpus type> default to bag of", "if len(sys.argv) <= 5: words_count = 10 else: words_count = int(sys.argv[5]) if len(sys.argv)", "src = sys.argv[3] if len(sys.argv) <= 4: tc = \"path\" else: tc =", "tc = \"path\" else: tc = sys.argv[4] if len(sys.argv) <= 5: words_count =", "x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\"", "\" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \"", "b for binary, t for tf-idf, anything else or missing for bag of", "int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\" else: src = sys.argv[3] if", "len(sys.argv) <= 7: startw = 0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type,", "path # <word count> the number of top words used in the calculation", "value in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1])", "count> # <corpus type> default to bag of words. 
b for binary, t", "= [] te = WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count,", "str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\") for value in tclist:", "sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type = \"bow\" if len(sys.argv) <=", "+ \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) +", "\"path\" else: tc = sys.argv[4] if len(sys.argv) <= 5: words_count = 10 else:", "name from topic.topicio import TopicIO from nltk.corpus import wordnet as wn from nltk.corpus", "<= 1: corpus_type = \"bow\" else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\"", "file used # <startw> the start point of collecting words if len(sys.argv) <=", "by a descending order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results", "topic.topicio import TopicIO from nltk.corpus import wordnet as wn from nltk.corpus import reuters", "for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the", "contains documents for LDA # <wordnet method> default to path # <word count>", "reuters import os # # syntax: python coh_wn_read.py <corpus type> <# of topics>", "default to path # <word count> the number of top words used in", "\"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum in value[1][3]: ofile.write(str(tcnum) +", "x: x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname +", "+ \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) +", "# <max_words> specify the number of words the preprocessed file used # <startw>", "250 else: max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0 else:", "+ name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation", 
"default to bag of words. b for binary, t for tf-idf, anything else", "<word count> the number of top words used in the calculation of topic", "ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for", "tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc,", "WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort", "+ \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum in value[1][3]: ofile.write(str(tcnum)", "7: startw = 0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src)", "dname = name.get_output_dir(corpus_type, topics_count, src) # read topics tio = TopicIO() tlist =", "name.get_output_dir(corpus_type, topics_count, src) # read topics tio = TopicIO() tlist = tio.read_topics(dname +", "from nltk.corpus import reuters import os # # syntax: python coh_wn_read.py <corpus type>", "corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count = 3 else: topics_count =", "topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list by", "ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median", "0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read topics", "ofile = open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \" + str(value[0]) +", "to 8 # <src> src folder which contains documents for LDA # <wordnet", "python coh_wn_read.py <corpus type> <# of topics> <src> <word count> # <corpus type>", "<# of topics> number of topics. 
default to 8 # <src> src folder", "not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc + \"/w0\" +", "= \"path\" else: tc = sys.argv[4] if len(sys.argv) <= 5: words_count = 10", "else or missing for bag of words # <# of topics> number of", "output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc", "from coherence.wn import WordNetEvaluator import sys import utils.name_convention as name from topic.topicio import", "tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\")", "max_words, startw=startw) # calculate topic evaluation values tclist = [] te = WordNetEvaluator()", "= int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read topics tio = TopicIO()", "import TopicIO from nltk.corpus import wordnet as wn from nltk.corpus import reuters import", "in the calculation of topic coherence # <max_words> specify the number of words", "<word count> # <corpus type> default to bag of words. 
b for binary,", "corpus_type = \"binary\" else: corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count =", "<= 7: startw = 0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count,", "order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc):", "\" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum", "# <word count> the number of top words used in the calculation of", "3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\" else:", "= 0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read", "int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read topics tio = TopicIO() tlist", "in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) +", "[] te = WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname,", "+ str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname, \"w\") for value in", "type> <# of topics> <src> <word count> # <corpus type> default to bag", "the list by a descending order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) #", "= 10 else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250", "used # <startw> the start point of collecting words if len(sys.argv) <= 1:", "startw = 0 else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) #", "count> the number of top words used in the calculation of topic coherence", "words # <# of topics> number of topics. 
default to 8 # <src>", "else: src = sys.argv[3] if len(sys.argv) <= 4: tc = \"path\" else: tc", "8 # <src> src folder which contains documents for LDA # <wordnet method>", "name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values", "os # # syntax: python coh_wn_read.py <corpus type> <# of topics> <src> <word", "topics> number of topics. default to 8 # <src> src folder which contains", "= TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words,", "of topic coherence # <max_words> specify the number of words the preprocessed file", "the start point of collecting words if len(sys.argv) <= 1: corpus_type = \"bow\"", "for binary, t for tf-idf, anything else or missing for bag of words", "<= 2: topics_count = 3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3:", "+ name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values tclist = [] te", "preprocessed file used # <startw> the start point of collecting words if len(sys.argv)", "dname + \"/\" + tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\"", "corpus_type = \"bow\" else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1]", "import utils.name_convention as name from topic.topicio import TopicIO from nltk.corpus import wordnet as", "of collecting words if len(sys.argv) <= 1: corpus_type = \"bow\" else: if sys.argv[1]", "results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc +", "if len(sys.argv) <= 7: startw = 0 else: startw = int(sys.argv[7]) dname =", "# sort the list by a descending order tclist = list(reversed(sorted(tclist, key=lambda x:", "as name from topic.topicio import TopicIO from nltk.corpus import wordnet as wn from", "values tclist = [] te = WordNetEvaluator() for index, topic in enumerate(tlist): tclist.append([index,", "sort the list by a descending order tclist = 
list(reversed(sorted(tclist, key=lambda x: x[1][2])))", "words_count = 10 else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words =", "6: max_words = 250 else: max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw", "te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list by a descending order tclist", "startw=startw) # calculate topic evaluation values tclist = [] te = WordNetEvaluator() for", "len(sys.argv) <= 4: tc = \"path\" else: tc = sys.argv[4] if len(sys.argv) <=", "# <startw> the start point of collecting words if len(sys.argv) <= 1: corpus_type", "or missing for bag of words # <# of topics> number of topics.", "else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type", "+ \"/\" + tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile", "nltk.corpus import wordnet as wn from nltk.corpus import reuters import os # #", "coherence # <max_words> specify the number of words the preprocessed file used #", "bag of words # <# of topics> number of topics. default to 8", "\"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type", "missing for bag of words # <# of topics> number of topics. default", "if len(sys.argv) <= 2: topics_count = 3 else: topics_count = int(sys.argv[2]) if len(sys.argv)", "1: corpus_type = \"bow\" else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif", "list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname =", "for bag of words # <# of topics> number of topics. 
default to", "to path # <word count> the number of top words used in the", "if len(sys.argv) <= 3: src = \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv)", "coherence.wn import WordNetEvaluator import sys import utils.name_convention as name from topic.topicio import TopicIO", "== \"b\": corpus_type = \"binary\" else: corpus_type = \"bow\" if len(sys.argv) <= 2:", "else: corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count = 3 else: topics_count", "= \"bow\" else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1] ==", "topics tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname +", "key=lambda x: x[1][2]))) # output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname", "else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250 else: max_words", "the preprocessed file used # <startw> the start point of collecting words if", "top words used in the calculation of topic coherence # <max_words> specify the", "\"bow\" if len(sys.argv) <= 2: topics_count = 3 else: topics_count = int(sys.argv[2]) if", "point of collecting words if len(sys.argv) <= 1: corpus_type = \"bow\" else: if", "of topics. default to 8 # <src> src folder which contains documents for", "from topic.topicio import TopicIO from nltk.corpus import wordnet as wn from nltk.corpus import", "= 250 else: max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0", "= \"bow\" if len(sys.argv) <= 2: topics_count = 3 else: topics_count = int(sys.argv[2])", "bag of words. 
b for binary, t for tf-idf, anything else or missing", "sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\"", "= tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw) # calculate", "len(sys.argv) <= 3: src = \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <=", "tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw) #", "TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw)", "<src> <word count> # <corpus type> default to bag of words. b for", "words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250 else: max_words =", "else: tc = sys.argv[4] if len(sys.argv) <= 5: words_count = 10 else: words_count", "<= 3: src = \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <= 4:", "# calculate topic evaluation values tclist = [] te = WordNetEvaluator() for index,", "of words. b for binary, t for tf-idf, anything else or missing for", "= dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values tclist =", "os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc + \"/w0\" + str(words_count) +", "list by a descending order tclist = list(reversed(sorted(tclist, key=lambda x: x[1][2]))) # output", "import reuters import os # # syntax: python coh_wn_read.py <corpus type> <# of", "anything else or missing for bag of words # <# of topics> number", "number of topics. default to 8 # <src> src folder which contains documents", "src) # read topics tio = TopicIO() tlist = tio.read_topics(dname + name.topics_dir()) ifname", "\"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <= 4: tc = \"path\" else:", "str(value[0]) + \"\\n\") ofile.write(\"Mean \" + str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2])", "of topics> number of topics. 
default to 8 # <src> src folder which", "else: startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read topics tio", "wn from nltk.corpus import reuters import os # # syntax: python coh_wn_read.py <corpus", "sys.argv[3] if len(sys.argv) <= 4: tc = \"path\" else: tc = sys.argv[4] if", "int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0 else: startw = int(sys.argv[7]) dname", "words used in the calculation of topic coherence # <max_words> specify the number", "of top words used in the calculation of topic coherence # <max_words> specify", "topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\" else: src =", "+ str(value[1][1]) + \"\\n\") ofile.write(\"Median \" + str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" +", "= \"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type = \"bow\"", "import wordnet as wn from nltk.corpus import reuters import os # # syntax:", "<wordnet method> default to path # <word count> the number of top words", "= name.get_output_dir(corpus_type, topics_count, src) # read topics tio = TopicIO() tlist = tio.read_topics(dname", "ofname = dname + \"/\" + tc + \"/w0\" + str(words_count) + \"_start\"+str(startw)", "len(sys.argv) <= 5: words_count = 10 else: words_count = int(sys.argv[5]) if len(sys.argv) <=", "max_words = int(sys.argv[6]) if len(sys.argv) <= 7: startw = 0 else: startw =", "len(sys.argv) <= 2: topics_count = 3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <=", "sys import utils.name_convention as name from topic.topicio import TopicIO from nltk.corpus import wordnet", "WordNetEvaluator import sys import utils.name_convention as name from topic.topicio import TopicIO from nltk.corpus", "src folder which contains documents for LDA # <wordnet method> default to path", "\"w\") for value in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \"", "<= 6: max_words = 250 else: max_words = int(sys.argv[6]) if len(sys.argv) <= 
7:", "\"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\" else: corpus_type = \"bow\" if", "+ \".txt\" ofile = open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \" +", "\"bow\" else: if sys.argv[1] == \"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\":", "as wn from nltk.corpus import reuters import os # # syntax: python coh_wn_read.py", "<startw> the start point of collecting words if len(sys.argv) <= 1: corpus_type =", "calculate topic evaluation values tclist = [] te = WordNetEvaluator() for index, topic", "# <corpus type> default to bag of words. b for binary, t for", "topics. default to 8 # <src> src folder which contains documents for LDA", "words if len(sys.argv) <= 1: corpus_type = \"bow\" else: if sys.argv[1] == \"t\":", "# <# of topics> number of topics. default to 8 # <src> src", "= dname + \"/\" + tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) +", "used in the calculation of topic coherence # <max_words> specify the number of", "else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src = \"pp_reuters\" else: src", "import sys import utils.name_convention as name from topic.topicio import TopicIO from nltk.corpus import", "str(value[1][2]) + \"\\n\") ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum in value[1][3]:", "if len(sys.argv) <= 1: corpus_type = \"bow\" else: if sys.argv[1] == \"t\": corpus_type", "the number of top words used in the calculation of topic coherence #", "# <src> src folder which contains documents for LDA # <wordnet method> default", "\"/\" + tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile =", "number of words the preprocessed file used # <startw> the start point of", "int(sys.argv[5]) if len(sys.argv) <= 6: max_words = 250 else: max_words = int(sys.argv[6]) if", "folder which contains documents for LDA # <wordnet method> default to path #", "the calculation of topic coherence # <max_words> specify the number of words the", "+ \"_start\"+str(startw) + 
\".txt\" ofile = open(ofname, \"w\") for value in tclist: ofile.write(\"Topic", "<= 4: tc = \"path\" else: tc = sys.argv[4] if len(sys.argv) <= 5:", "5: words_count = 10 else: words_count = int(sys.argv[5]) if len(sys.argv) <= 6: max_words", "+ tc + \"/w0\" + str(words_count) + \"_start\"+str(startw) + \".txt\" ofile = open(ofname,", "len(sys.argv) <= 6: max_words = 250 else: max_words = int(sys.argv[6]) if len(sys.argv) <=", "3: src = \"pp_reuters\" else: src = sys.argv[3] if len(sys.argv) <= 4: tc", "wordnet as wn from nltk.corpus import reuters import os # # syntax: python", "sys.argv[4] if len(sys.argv) <= 5: words_count = 10 else: words_count = int(sys.argv[5]) if", "t for tf-idf, anything else or missing for bag of words # <#", "= open(ofname, \"w\") for value in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\")", "type> default to bag of words. b for binary, t for tf-idf, anything", "documents for LDA # <wordnet method> default to path # <word count> the", "startw = int(sys.argv[7]) dname = name.get_output_dir(corpus_type, topics_count, src) # read topics tio =", "2: topics_count = 3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src", "if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" + tc + \"/w0\"", "# # syntax: python coh_wn_read.py <corpus type> <# of topics> <src> <word count>", "len(sys.argv) <= 1: corpus_type = \"bow\" else: if sys.argv[1] == \"t\": corpus_type =", "startw=startw)]) # sort the list by a descending order tclist = list(reversed(sorted(tclist, key=lambda", "import os # # syntax: python coh_wn_read.py <corpus type> <# of topics> <src>", "method> default to path # <word count> the number of top words used", "topics_count = 3 else: topics_count = int(sys.argv[2]) if len(sys.argv) <= 3: src =", "ofile.write(\"Sum \" + str(value[1][0]) + \"\\n\") for tcnum in value[1][3]: ofile.write(str(tcnum) + \"\\n\")", "<corpus type> default to bag of words. 
b for binary, t for tf-idf,", "binary, t for tf-idf, anything else or missing for bag of words #", "== \"t\": corpus_type = \"tfidf\" elif sys.argv[1] == \"b\": corpus_type = \"binary\" else:", "<corpus type> <# of topics> <src> <word count> # <corpus type> default to", "from nltk.corpus import wordnet as wn from nltk.corpus import reuters import os #", "= \"binary\" else: corpus_type = \"bow\" if len(sys.argv) <= 2: topics_count = 3", "topics> <src> <word count> # <corpus type> default to bag of words. b", "tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list by a descending order", "for value in tclist: ofile.write(\"Topic \" + str(value[0]) + \"\\n\") ofile.write(\"Mean \" +", "index, topic in enumerate(tlist): tclist.append([index, te.get_values(topic, words_count, ifname, startw=startw)]) # sort the list", "<src> src folder which contains documents for LDA # <wordnet method> default to", "name.te_preprocess(tc, max_words, startw=startw) # calculate topic evaluation values tclist = [] te =", "for tf-idf, anything else or missing for bag of words # <# of", "tio.read_topics(dname + name.topics_dir()) ifname = dname + name.te_preprocess(tc, max_words, startw=startw) # calculate topic", "# output results if not os.path.exists(dname+\"/\"+tc): os.makedirs(dname+\"/\"+tc) ofname = dname + \"/\" +" ]
[ "sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound}", "of all units. Does not need to be sorted. \"\"\" target = None", "of all units. Does not need to be sorted. \"\"\" for unit in", "location: GridLocation) -> List[GridLocation]: \"\"\" Return a list of the open locations around", "a path. Now see if it's a better candidate than one already found", "distanceTo(self, other): \"\"\" Return the Manhattan distance between this unit and other Keyword", "if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points", "\"\"\" availableList = [] for unit in units: if unit.hitPoints > 0 and", "unit in units if unit.race == \"E\" and unit.hitPoints > 0]) if survivingElfCount", "survivingElfCount = 0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1 cave,", "cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in units if", "or survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount", "self.y) def sameLocation(self, other): \"\"\" Return True if this unit is at the", "GridLocation) -> List[GridLocation]: \"\"\" Return a list of the open locations around the", "if unit.hitPoints > 0] units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin =", "or len(path) < len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd < targetLocation):", "# The first step in the path is the current location so go", "unit.hitPoints > 0 and unit.race != self.race: return True return False def availableEnemies(self,", "# We found a path. 
Now see if it's a better candidate than", "with open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth = 0 for line", "= nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave, units): \"\"\" Attack an", "break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for unit", "and self.y == other.y def atLocation(self, x, y): \"\"\" Return True if this", "def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y: return self.y", "other.y) def canAttack(self, units): \"\"\" Return True if there is an enemy available", "Does not need to be sorted. cave -- The array representing the cave", "{playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\"", "units) if not finished: playRound += 1 print(playRound) livingUnits = [unit for unit", "- 1)) if location.column > 0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column", "Create the cave with the determined puzzle dimensions. 
cave = np.full((puzzleHeight, puzzleWidth), '.',", "return self.x == other.x and self.y == other.y def atLocation(self, x, y): \"\"\"", "and unit.race != self.race: return True return False def availableEnemies(self, cave, units): \"\"\"", "survivingElfCount = len([unit for unit in units if unit.race == \"E\" and unit.hitPoints", "if unit.hitPoints > 0 and unit.race != self.race: return True return False def", "!= self.race and self.distanceTo(unit) == 1: return True return False def enemyExists(self, units):", "target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] = \".\" def printCave(cave,", "if unit.race == \"E\"]) finished = False playRound = 0 while not finished:", "self.x < other.x def __eq__(self, other): return self.x == other.x and self.y ==", "is not None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] =", "location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column > 0 and row[location.column", "this unit and other Keyword arguments: other -- The other unit. \"\"\" return", "= self.availableEnemies(cave, units) for enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave,", "1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row + 1 < len(cave)", "self.availableEnemies(cave, units) for enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations)", "self.x] = '.' 
# The first step in the path is the current", "len(cave) and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return", "- 1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column > 0", "available.append(GridLocation(location.column, location.row - 1)) if location.column > 0 and row[location.column - 1] ==", "> 0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if", "the open locations around the given location. The locations are in reading order.", "and self.distanceTo(unit) == 1: if target is None or unit.hitPoints < target.hitPoints or", "other.y: return self.y < other.y return self.x < other.x def __eq__(self, other): return", "A list of all units. Does not need to be sorted. cave --", "int = 200 attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self,", "0: continue if not unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave,", "and row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row +", "True if there is an enemy available to attack. Keyword arguments: units --", "in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower))", "def distanceTo(self, other): \"\"\" Return the Manhattan distance between this unit and other", "We found a path. 
Now see if it's a better candidate than one", "+= 1 print(playRound) livingUnits = [unit for unit in units if unit.hitPoints >", "units) survivingElfCount = len([unit for unit in units if unit.race == \"E\" and", "after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit in units]) survivingRace =", "0 and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if", "units, showScores=False): for rowNumber, row in enumerate(cave): scores = \" \" for columnNumber,", "for unit in units: if unit.hitPoints > 0 and unit.race != self.race: return", "GridLocation = None shortestPath = None enemies = self.availableEnemies(cave, units) for enemy in", "def attack(self, cave, units): \"\"\" Attack an available enemy. units -- A list", "the cave with the determined puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str)", "+= 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave with the determined", "while not finished: for unit in units: if unit.hitPoints <= 0: continue if", "== \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column + 1 < len(row) and", "len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row", "+ 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column", "scores = \" \" for columnNumber, cell in enumerate(row): print(cell, end='') if showScores", "self.column < other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return a list", "in units: if unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished = True", "[unit for unit in units if unit.hitPoints > 0] units = sorted(livingUnits) if", "def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return a list of 
the open", "self.race def attack(self, cave, units): \"\"\" Attack an available enemy. units -- A", "units if unit.race == \"E\"]) finished = False playRound = 0 while not", "typing import List, NamedTuple import numpy as np from generic_search import bfsCave, nodeToPath", "location.row)) if location.column + 1 < len(row) and row[location.column + 1] == \".\":", "scores += str(unit) + \" \" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName,", "np from generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace = \".\" class", "0 puzzleWidth = 0 for line in infile: puzzleHeight += 1 puzzleWidth =", "finished: playRound += 1 print(playRound) livingUnits = [unit for unit in units if", "a list of the open locations around the given location. The locations are", "# Populate the cave and the list of units. with open(puzzleName, \"r\") as", "location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return", "there is an enemy available to attack. Keyword arguments: units -- A list", "goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit: x: int y:", "units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] =", "unit is at the same location as other \"\"\" return self.x == other.x", "== x and self.y == y def distanceTo(self, other): \"\"\" Return the Manhattan", "The enemy does not need to be available for attack. Keyword arguments: units", "len(line.rstrip())) # Create the cave with the determined puzzle dimensions. 
cave = np.full((puzzleHeight,", "import dataclass from typing import List, NamedTuple import numpy as np from generic_search", "infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave with", "print(playRound) livingUnits = [unit for unit in units if unit.hitPoints > 0] units", "unit.race == \"E\"]) finished = False playRound = 0 while not finished: for", "sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\" if units[0].race == \"G\" else", "__lt__(self, other): return self.row < other.row or \\ self.row == other.row and self.column", "= \".\" def printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave): scores =", "Populate the cave and the list of units. with open(puzzleName, \"r\") as infile:", "['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber,", "for enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution:", "len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath = path", "< target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit < target: target =", "None: targetLocation: GridLocation = None shortestPath = None enemies = self.availableEnemies(cave, units) for", "self.y < other.y return self.x < other.x def __eq__(self, other): return self.x ==", "= False playRound = 0 while not finished: for unit in units: if", "unit.race != self.race and self.distanceTo(unit) == 1: return True return False def enemyExists(self,", "location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True if this unit", "if this unit is at this x,y location \"\"\" return self.x == x", "cave \"\"\" availableList = [] for unit in units: if unit.hitPoints > 0", "available for attack. Keyword arguments: units -- A list of all units. 
Does", "target.hitPoints and unit < target: target = unit if target is not None:", "!= self.race and self.distanceTo(unit) == 1: if target is None or unit.hitPoints <", "other): return self.row < other.row or \\ self.row == other.row and self.column <", "= [] row = cave[location.row] if location.row > 0 and cave[location.row - 1,", "finished = False playRound = 0 while not finished: for unit in units:", "[\"E\", \"G\"]: unit = next(unit for unit in units if unit.hitPoints > 0", "unit.location()): availableList.append(unit) return availableList def move(self, cave, units) -> None: targetLocation: GridLocation =", "cave, openLocations) if solution: path = nodeToPath(solution) # We found a path. Now", "is the current location so go to the second step nextLocation: GridLocation =", "self.x == other.x and self.y == other.y def location(self): return GridLocation(self.x, self.y) def", "in enumerate(row): print(cell, end='') if showScores and cell in [\"E\", \"G\"]: unit =", "\"r\") as infile: for rowNumber, line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()):", "\"E\"]) finished = False playRound = 0 while not finished: for unit in", "to the second step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y =", "None for unit in units: if unit.hitPoints > 0 and unit.race != self.race", "other.row and self.column < other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return", "ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit in units]) survivingRace", "open locations around the given location. The locations are in reading order. \"\"\"", "emptySpace = \".\" class GridLocation(NamedTuple): column: int row: int def __lt__(self, other): return", "other.x and self.y == other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other):", "-- The other unit. 
\"\"\" return abs(self.x - other.x) + abs(self.y - other.y)", "False def enemyExists(self, units): \"\"\" Return True if an enemy exists. The enemy", "self.row == other.row and self.column < other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]:", "enemy does not need to be available for attack. Keyword arguments: units --", "units) -> None: targetLocation: GridLocation = None shortestPath = None enemies = self.availableEnemies(cave,", "puzzleWidth), '.', dtype=str) units = [] # Populate the cave and the list", "[] for unit in units: if unit.hitPoints > 0 and unit.race != self.race", "end='') if showScores and cell in [\"E\", \"G\"]: unit = next(unit for unit", "return False def availableEnemies(self, cave, units): \"\"\" Return a list of available enemies", "a better candidate than one already found pathEnd = path[-1] if shortestPath is", "locations are in reading order. \"\"\" available = [] row = cave[location.row] if", "win with {hitPoints} total hit points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound", "abs(self.x - other.x) + abs(self.y - other.y) def canAttack(self, units): \"\"\" Return True", "row = cave[location.row] if location.row > 0 and cave[location.row - 1, location.column] ==", "other \"\"\" return self.x == other.x and self.y == other.y def atLocation(self, x,", "\"\"\" Return True if this unit is at the same location as other", "'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units if __name__ ==", "class Unit: x: int y: int race: str hitPoints: int = 200 attackDamage:", "nodeToPath wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int row: int", "see if it's a better candidate than one already found pathEnd = path[-1]", "__str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y: return self.y <", "def availableEnemies(self, cave, units): \"\"\" Return a list of available enemies in the", "cave -- The 
array representing the cave \"\"\" availableList = [] for unit", "all units. Does not need to be sorted. \"\"\" for unit in units:", "arguments: other -- The other unit. \"\"\" return abs(self.x - other.x) + abs(self.y", "print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the puzzle. with open(puzzleName,", "self.race: return True return False def availableEnemies(self, cave, units): \"\"\" Return a list", "y def distanceTo(self, other): \"\"\" Return the Manhattan distance between this unit and", "\"\"\" return self.x == other.x and self.y == other.y def atLocation(self, x, y):", "Return True if there is an enemy available to attack. Keyword arguments: units", "False playRound = 0 while not finished: for unit in units: if unit.hitPoints", "import numpy as np from generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace", "attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave,", "open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth = 0 for line in", "elfAttackPower): # Get the dimensions of the puzzle. with open(puzzleName, \"r\") as infile:", "unit and other Keyword arguments: other -- The other unit. \"\"\" return abs(self.x", "and self.y == other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\"", "column: int row: int def __lt__(self, other): return self.row < other.row or \\", "line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell in ['E', 'G']:", "in units: if unit.hitPoints > 0 and unit.race != self.race and openLocations(cave, unit.location()):", "+ abs(self.y - other.y) def canAttack(self, units): \"\"\" Return True if there is", "if not unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave,", "if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G'", "cave[self.y, self.x] = '.' 
# The first step in the path is the", "self.y == other.y def atLocation(self, x, y): \"\"\" Return True if this unit", "return self.x == x and self.y == y def distanceTo(self, other): \"\"\" Return", "0] units = sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat", "rounds\") hitPoints = sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\" if units[0].race", "unit.attack(cave, units) survivingElfCount = len([unit for unit in units if unit.race == \"E\"", "shortestPath: cave[self.y, self.x] = '.' # The first step in the path is", "move(self, cave, units) -> None: targetLocation: GridLocation = None shortestPath = None enemies", "\"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit", "\".\" class GridLocation(NamedTuple): column: int row: int def __lt__(self, other): return self.row <", "openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave, units) -> None: targetLocation: GridLocation", "def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True if this", "and cell in [\"E\", \"G\"]: unit = next(unit for unit in units if", "def __lt__(self, other): if self.y != other.y: return self.y < other.y return self.x", "= None for unit in units: if unit.hitPoints > 0 and unit.race !=", "0 for line in infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) #", "in units if unit.hitPoints > 0] units = sorted(livingUnits) if __name__ == \"__main__\":", "> 0] units = sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True)", "is None or len(path) < len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd", "unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for unit in units if", "> 0 and unit.race != self.race and openLocations(cave, 
unit.location()): availableList.append(unit) return availableList def", "this unit is at this x,y location \"\"\" return self.x == x and", "for unit in units]) survivingRace = \"Goblins\" if units[0].race == \"G\" else \"Elves\"", "targetLocation = pathEnd shortestPath = path if shortestPath: cave[self.y, self.x] = '.' #", "cell in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if", "are in reading order. \"\"\" available = [] row = cave[location.row] if location.row", "1 @dataclass class Unit: x: int y: int race: str hitPoints: int =", "is None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit", "units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints", "other): if self.y != other.y: return self.y < other.y return self.x < other.x", "= sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after", "all units. Does not need to be sorted. cave -- The array representing", "\"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished = False playRound = 0 while", "cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] # Populate the cave", "if shortestPath: cave[self.y, self.x] = '.' 
# The first step in the path", "survivingElfCount < originalElfCount: finished = True break if not finished: playRound += 1", "def __lt__(self, other): return self.row < other.row or \\ self.row == other.row and", "unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount", "targetLocation): targetLocation = pathEnd shortestPath = path if shortestPath: cave[self.y, self.x] = '.'", "units = sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends", "puzzleWidth = 0 for line in infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth,", "-> None: targetLocation: GridLocation = None shortestPath = None enemies = self.availableEnemies(cave, units)", "cell, attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return", "not unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units)", "\"\"\" for unit in units: if unit.hitPoints > 0 and unit.race != self.race", "other.x) + abs(self.y - other.y) def canAttack(self, units): \"\"\" Return True if there", "self.x == x and self.y == y def distanceTo(self, other): \"\"\" Return the", "[] row = cave[location.row] if location.row > 0 and cave[location.row - 1, location.column]", "distance between this unit and other Keyword arguments: other -- The other unit.", "units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points left\")", "nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave, units): \"\"\" Attack an available", "printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave): scores = \" \" for", "= 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y:", "location so go to the second step nextLocation: GridLocation = shortestPath[1] self.x =", "not 
None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] = \".\"", "= next(unit for unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber))", "A list of all units. Does not need to be sorted. \"\"\" target", "or \\ self.row == other.row and self.column < other.column def openLocations(cave, location: GridLocation)", "None or len(path) < len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd <", "in units]) survivingRace = \"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win", "livingUnits = [unit for unit in units if unit.hitPoints > 0] units =", "1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation):", "or \\ len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath", "= 0 survivingElfCount = 0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower +=", "Keyword arguments: units -- A list of all units. Does not need to", "units if unit.hitPoints > 0] units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin", "if target is None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints", "columnNumber] = cell return cave, units if __name__ == \"15a\": cave, units =", "available enemy. units -- A list of all units. Does not need to", "for columnNumber, cell in enumerate(row): print(cell, end='') if showScores and cell in [\"E\",", "be sorted. \"\"\" for unit in units: if unit.hitPoints > 0 and unit.race", "in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber,", "the list Keyword arguments: units -- A list of all units. Does not", "and unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished = True break if", "Return True if an enemy exists. 
The enemy does not need to be", "<= 0: continue if not unit.enemyExists(units): finished = True break if not unit.canAttack(units):", "from typing import List, NamedTuple import numpy as np from generic_search import bfsCave,", "Return a list of available enemies in the list Keyword arguments: units --", "self.race and self.distanceTo(unit) == 1: if target is None or unit.hitPoints < target.hitPoints", "def sameLocation(self, other): \"\"\" Return True if this unit is at the same", "shortestPath = None enemies = self.availableEnemies(cave, units) for enemy in enemies: solution =", "infile: puzzleHeight = 0 puzzleWidth = 0 for line in infile: puzzleHeight +=", "cave[location.row] if location.row > 0 and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column,", "this unit is at the same location as other \"\"\" return self.x ==", "= path if shortestPath: cave[self.y, self.x] = '.' # The first step in", "finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount =", "unit.race != self.race and self.distanceTo(unit) == 1: if target is None or unit.hitPoints", "-> List[GridLocation]: \"\"\" Return a list of the open locations around the given", "and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column", "!= self.race: return True return False def availableEnemies(self, cave, units): \"\"\" Return a", "enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path", "units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in units if unit.race", "end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the puzzle. 
with", "== \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row", "enemies = self.availableEnemies(cave, units) for enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation,", "path = nodeToPath(solution) # We found a path. Now see if it's a", "List, NamedTuple import numpy as np from generic_search import bfsCave, nodeToPath wall =", "other.x def __eq__(self, other): return self.x == other.x and self.y == other.y def", "!= other.y: return self.y < other.y return self.x < other.x def __eq__(self, other):", "pathEnd = path[-1] if shortestPath is None or len(path) < len(shortestPath) or \\", "self.y = nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave, units): \"\"\" Attack", "Attack an available enemy. units -- A list of all units. Does not", "unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished = True break if not", "as other \"\"\" return self.x == other.x and self.y == other.y def atLocation(self,", "other Keyword arguments: other -- The other unit. \"\"\" return abs(self.x - other.x)", "sorted. \"\"\" for unit in units: if unit.hitPoints > 0 and unit.race !=", "if target.hitPoints <= 0: cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False): for", "loadPuzzle(\"15.txt\", 3) finished = False playRound = 0 while not finished: for unit", "in units: if unit.hitPoints > 0 and unit.race != self.race: return True return", "locations around the given location. The locations are in reading order. \"\"\" available", "in the list Keyword arguments: units -- A list of all units. 
Does", "showScores=False): for rowNumber, row in enumerate(cave): scores = \" \" for columnNumber, cell", "and unit.race != self.race and self.distanceTo(unit) == 1: if target is None or", "= \" \" for columnNumber, cell in enumerate(row): print(cell, end='') if showScores and", "units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \"", "= bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution) # We", "True if an enemy exists. The enemy does not need to be available", "if target is not None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y,", "# Get the dimensions of the puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight", "and unit < target: target = unit if target is not None: target.hitPoints", "self.x == other.x and self.y == other.y def atLocation(self, x, y): \"\"\" Return", "len([unit for unit in units if unit.race == \"E\"]) finished = False playRound", "len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath = path if shortestPath:", "printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for", "unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1: if target", "already found pathEnd = path[-1] if shortestPath is None or len(path) < len(shortestPath)", "unit is at this x,y location \"\"\" return self.x == x and self.y", "list of available enemies in the list Keyword arguments: units -- A list", "other): \"\"\" Return True if this unit is at the same location as", "for unit in units if unit.race == \"E\" and unit.hitPoints > 0]) if", "from dataclasses import dataclass from typing import List, NamedTuple import numpy as np", "len(path) < len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation", "enemy. units -- A list of all units. 
Does not need to be", "unit in units if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin = units[0].race", "finished: for unit in units: if unit.hitPoints <= 0: continue if not unit.enemyExists(units):", "other.y def atLocation(self, x, y): \"\"\" Return True if this unit is at", "location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) +", "same location as other \"\"\" return self.x == other.x and self.y == other.y", "unit.hitPoints == target.hitPoints and unit < target: target = unit if target is", "else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units if __name__ == \"15a\":", "max(puzzleWidth, len(line.rstrip())) # Create the cave with the determined puzzle dimensions. cave =", "\".\" def printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave): scores = \"", "list of units. with open(puzzleName, \"r\") as infile: for rowNumber, line in enumerate(infile):", "available to attack. Keyword arguments: units -- A list of all units. Does", "cell in [\"E\", \"G\"]: unit = next(unit for unit in units if unit.hitPoints", "nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave, units): \"\"\"", "0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \" if len(scores.strip()): print(scores,", "location.row > 0 and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row -", "bfsCave, nodeToPath wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int row:", "\"\"\" Return True if this unit is at this x,y location \"\"\" return", "attack. Keyword arguments: units -- A list of all units. 
Does not need", "if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1: return", "y): \"\"\" Return True if this unit is at this x,y location \"\"\"", "= [] for unit in units: if unit.hitPoints > 0 and unit.race !=", "puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave with the determined puzzle dimensions.", "np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] # Populate the cave and the", "0 and unit.race != self.race and self.distanceTo(unit) == 1: if target is None", "array representing the cave \"\"\" availableList = [] for unit in units: if", "this x,y location \"\"\" return self.x == x and self.y == y def", "= \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int row: int def __lt__(self,", "abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit: x:", "unit. \"\"\" return abs(self.x - other.x) + abs(self.y - other.y) def canAttack(self, units):", "if location.row > 0 and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row", "and self.distanceTo(unit) == 1: return True return False def enemyExists(self, units): \"\"\" Return", "unit.race == \"E\" and unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished =", "total hit points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}\") print(f\"Elf", "if location.column > 0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1,", "and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available)", "targetLocation: GridLocation = None shortestPath = None enemies = self.availableEnemies(cave, units) for enemy", "the dimensions of the puzzle. 
with open(puzzleName, \"r\") as infile: puzzleHeight = 0", "the current location so go to the second step nextLocation: GridLocation = shortestPath[1]", "goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass class", "for unit in units: if unit.hitPoints > 0 and unit.race != self.race and", "not finished: for unit in units: if unit.hitPoints <= 0: continue if not", "= 200 attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other):", "< originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit", "if there is an enemy available to attack. Keyword arguments: units -- A", "f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y: return self.y < other.y return", "unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit < target: target", "unit = next(unit for unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber,", "= True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished:", "+ \" \" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get", "__name__ == \"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount = 0 survivingElfCount", "unit if target is not None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0:", "in the path is the current location so go to the second step", "the same location as other \"\"\" return self.x == other.x and self.y ==", "-- The array representing the cave \"\"\" availableList = [] for unit in", "< other.y return self.x < other.x def __eq__(self, other): return self.x == other.x", "x and self.y == y def distanceTo(self, other): \"\"\" Return the Manhattan distance", "def atLocation(self, x, y): \"\"\" Return True if this unit is at this", "for line in infile: 
puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create", "self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave,", "= self.race def attack(self, cave, units): \"\"\" Attack an available enemy. units --", "def move(self, cave, units) -> None: targetLocation: GridLocation = None shortestPath = None", "does not need to be available for attack. Keyword arguments: units -- A", "better candidate than one already found pathEnd = path[-1] if shortestPath is None", "enemy.location(), reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution) # We found a", "row: int def __lt__(self, other): return self.row < other.row or \\ self.row ==", "if __name__ == \"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount = 0", "columnNumber, cell in enumerate(row): print(cell, end='') if showScores and cell in [\"E\", \"G\"]:", "if location.row + 1 < len(cave) and cave[location.row + 1, location.column] == \".\":", "other): \"\"\" Return the Manhattan distance between this unit and other Keyword arguments:", "\"E\" and unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished = True break", "an available enemy. units -- A list of all units. Does not need", "== \"E\"]) finished = False playRound = 0 while not finished: for unit", "return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit:", "\".\": available.append(GridLocation(location.column, location.row - 1)) if location.column > 0 and row[location.column - 1]", "generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column:", "return self.x == other.x and self.y == other.y def location(self): return GridLocation(self.x, self.y)", "Return the Manhattan distance between this unit and other Keyword arguments: other --", "and the list of units. 
with open(puzzleName, \"r\") as infile: for rowNumber, line", "columnNumber, cell in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3", "if not finished: playRound += 1 print(playRound) livingUnits = [unit for unit in", "unit in units if unit.race == \"E\"]) finished = False playRound = 0", "cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def", "Now see if it's a better candidate than one already found pathEnd =", "\"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points left\") print(f\"Outcome: {playRound} * {hitPoints}", "\"\"\" Return a list of available enemies in the list Keyword arguments: units", "return availableList def move(self, cave, units) -> None: targetLocation: GridLocation = None shortestPath", "of the puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth =", "is at the same location as other \"\"\" return self.x == other.x and", "loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the puzzle. with open(puzzleName, \"r\") as", "enemyExists(self, units): \"\"\" Return True if an enemy exists. The enemy does not", "units): \"\"\" Return True if an enemy exists. The enemy does not need", "if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished: playRound += 1", "cell return cave, units if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3)", "\"\"\" for unit in units: if unit.hitPoints > 0 and unit.race != self.race:", "= nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race def attack(self, cave, units):", "\"\"\" Attack an available enemy. units -- A list of all units. Does", "order. \"\"\" available = [] row = cave[location.row] if location.row > 0 and", "around the given location. The locations are in reading order. 
\"\"\" available =", "showScores and cell in [\"E\", \"G\"]: unit = next(unit for unit in units", "1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column -", "unit.hitPoints > 0] units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True", "enemy exists. The enemy does not need to be available for attack. Keyword", "cave, units) -> None: targetLocation: GridLocation = None shortestPath = None enemies =", "the puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth = 0", "True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished: playRound", "= cell return cave, units if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\",", "the determined puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = []", "elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit", "< len(cave) and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1))", "puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth = 0 for", "units: if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1:", "need to be sorted. \"\"\" for unit in units: if unit.hitPoints > 0", "in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell", "<= 0: cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False): for rowNumber, row", "units: if unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished = True break", "with the determined puzzle dimensions. 
cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units =", "@dataclass class Unit: x: int y: int race: str hitPoints: int = 200", "if unit.race == \"E\" and unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished", "with {hitPoints} total hit points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound *", "hitPoints: int = 200 attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def", "== other.row and self.column < other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\"", "True return False def enemyExists(self, units): \"\"\" Return True if an enemy exists.", "== target.hitPoints and unit < target: target = unit if target is not", "== \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished = False playRound = 0", "unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished = True break if not", "- goalLocation.column) == 1 @dataclass class Unit: x: int y: int race: str", "given location. The locations are in reading order. \"\"\" available = [] row", "> 0 and unit.race != self.race and self.distanceTo(unit) == 1: return True return", "< originalElfCount: finished = True break if not finished: playRound += 1 print(playRound)", "The locations are in reading order. \"\"\" available = [] row = cave[location.row]", "str hitPoints: int = 200 attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\"", "rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell", "units) unit.attack(cave, units) survivingElfCount = len([unit for unit in units if unit.race ==", "units. Does not need to be sorted. 
cave -- The array representing the", "a list of available enemies in the list Keyword arguments: units -- A", "= np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] # Populate the cave and", "\"\"\" return abs(self.x - other.x) + abs(self.y - other.y) def canAttack(self, units): \"\"\"", "True elfAttackPower = 3 originalElfCount = 0 survivingElfCount = 0 while goblinsWin or", "unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for unit in units if unit.race", "exists. The enemy does not need to be available for attack. Keyword arguments:", "print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the puzzle.", "and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave, units) -> None: targetLocation:", "openLocations) if solution: path = nodeToPath(solution) # We found a path. Now see", "not finished: playRound += 1 print(playRound) livingUnits = [unit for unit in units", "and self.column < other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return a", "cave, units): \"\"\" Return a list of available enemies in the list Keyword", "1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in units", "GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True if this unit is at", "print(f\"{survivingRace} win with {hitPoints} total hit points left\") print(f\"Outcome: {playRound} * {hitPoints} =", "== 1: return True return False def enemyExists(self, units): \"\"\" Return True if", "for attack. Keyword arguments: units -- A list of all units. 
Does not", "< targetLocation): targetLocation = pathEnd shortestPath = path if shortestPath: cave[self.y, self.x] =", "self.distanceTo(unit) == 1: if target is None or unit.hitPoints < target.hitPoints or \\", "if shortestPath is None or len(path) < len(shortestPath) or \\ len(path) == len(shortestPath)", "= 3 originalElfCount = 0 survivingElfCount = 0 while goblinsWin or survivingElfCount <", "target = unit if target is not None: target.hitPoints -= self.attackDamage if target.hitPoints", "Unit: x: int y: int race: str hitPoints: int = 200 attackDamage: int", "is an enemy available to attack. Keyword arguments: units -- A list of", "0 and unit.race != self.race and self.distanceTo(unit) == 1: return True return False", "A list of all units. Does not need to be sorted. \"\"\" for", "enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber,", "if self.y != other.y: return self.y < other.y return self.x < other.x def", "atLocation(self, x, y): \"\"\" Return True if this unit is at this x,y", "location \"\"\" return self.x == x and self.y == y def distanceTo(self, other):", "current location so go to the second step nextLocation: GridLocation = shortestPath[1] self.x", "self.row < other.row or \\ self.row == other.row and self.column < other.column def", "return self.x < other.x def __eq__(self, other): return self.x == other.x and self.y", "== \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column > 0 and row[location.column -", "unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if", "- 1, location.row)) if location.column + 1 < len(row) and row[location.column + 1]", "and unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave,", "< other.column def openLocations(cave, location: GridLocation) -> 
List[GridLocation]: \"\"\" Return a list of", "== 1 @dataclass class Unit: x: int y: int race: str hitPoints: int", "an enemy exists. The enemy does not need to be available for attack.", "= loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in units if unit.race ==", "in units if unit.race == \"E\"]) finished = False playRound = 0 while", "> 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \" if len(scores.strip()):", "= nodeToPath(solution) # We found a path. Now see if it's a better", "True return False def availableEnemies(self, cave, units): \"\"\" Return a list of available", "target.hitPoints <= 0: cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False): for rowNumber,", "line in infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the", "units if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished = False", "int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y !=", "location as other \"\"\" return self.x == other.x and self.y == other.y def", "True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for", "units if unit.race == \"E\" and unit.hitPoints > 0]) if survivingElfCount < originalElfCount:", "not need to be sorted. \"\"\" target = None for unit in units:", "[unit for unit in units if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin", "rowNumber, line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell in ['E',", "Does not need to be sorted. 
\"\"\" for unit in units: if unit.hitPoints", "__lt__(self, other): if self.y != other.y: return self.y < other.y return self.x <", "goblinsWin = True elfAttackPower = 3 originalElfCount = 0 survivingElfCount = 0 while", "enumerate(row): print(cell, end='') if showScores and cell in [\"E\", \"G\"]: unit = next(unit", "the path is the current location so go to the second step nextLocation:", "unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave, units)", "\"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints", "str(unit) + \" \" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): #", "cave, units): \"\"\" Attack an available enemy. units -- A list of all", "other.row or \\ self.row == other.row and self.column < other.column def openLocations(cave, location:", "units: if unit.hitPoints > 0 and unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit)", "1: return True return False def enemyExists(self, units): \"\"\" Return True if an", "unit in units: if unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished =", "< other.x def __eq__(self, other): return self.x == other.x and self.y == other.y", "location.column > 0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1, location.row))", "3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y: return", "1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column + 1 < len(row)", "0 while not finished: for unit in units: if unit.hitPoints <= 0: continue", "survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount =", "list of the open locations around the given location. 
The locations are in", "an enemy available to attack. Keyword arguments: units -- A list of all", "3) finished = False playRound = 0 while not finished: for unit in", "+ 1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row + 1 <", "in infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave", "unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1: return True", "list of all units. Does not need to be sorted. \"\"\" for unit", "cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave):", "0 and unit.race != self.race: return True return False def availableEnemies(self, cave, units):", "solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution) #", "list of all units. Does not need to be sorted. \"\"\" target =", "__name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished = False playRound =", "in units: if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) ==", "from generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple):", "to be sorted. \"\"\" for unit in units: if unit.hitPoints > 0 and", "cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else", "in units if unit.race == \"E\" and unit.hitPoints > 0]) if survivingElfCount <", "sorted. \"\"\" target = None for unit in units: if unit.hitPoints > 0", "class GridLocation(NamedTuple): column: int row: int def __lt__(self, other): return self.row < other.row", "-- A list of all units. Does not need to be sorted. 
cave", "in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) +", "shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race def attack(self,", "== 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units if __name__", "unit in units: if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit)", "path is the current location so go to the second step nextLocation: GridLocation", "target is None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints and", "the Manhattan distance between this unit and other Keyword arguments: other -- The", "and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \" if len(scores.strip()): print(scores, end='')", "print(f\"Combat ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit in units])", "+ 1 < len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1,", "cave with the determined puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units", "0: cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False): for rowNumber, row in", "originalElfCount = len([unit for unit in units if unit.race == \"E\"]) finished =", "+ 1, location.row)) if location.row + 1 < len(cave) and cave[location.row + 1,", "= len([unit for unit in units if unit.race == \"E\"]) finished = False", "self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave, units) -> None:", "of all units. Does not need to be sorted. 
cave -- The array", "self.y == other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return", "\"\"\" available = [] row = cave[location.row] if location.row > 0 and cave[location.row", "= loadPuzzle(\"15.txt\", 3) finished = False playRound = 0 while not finished: for", "+ 1 < len(cave) and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row", "unit.race != self.race: return True return False def availableEnemies(self, cave, units): \"\"\" Return", "= path[-1] if shortestPath is None or len(path) < len(shortestPath) or \\ len(path)", "\" \" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the", "enemy available to attack. Keyword arguments: units -- A list of all units.", "not need to be sorted. cave -- The array representing the cave \"\"\"", "originalElfCount = 0 survivingElfCount = 0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower", "goblinsWin = units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full", "print(cell, end='') if showScores and cell in [\"E\", \"G\"]: unit = next(unit for", "in enumerate(cave): scores = \" \" for columnNumber, cell in enumerate(row): print(cell, end='')", "units) for enemy in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if", "step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x]", "units) unit.attack(cave, units) if not finished: playRound += 1 print(playRound) livingUnits = [unit", "is at this x,y location \"\"\" return self.x == x and self.y ==", "int row: int def __lt__(self, other): return self.row < other.row or \\ self.row", "reading order. 
\"\"\" available = [] row = cave[location.row] if location.row > 0", "len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the", "\"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points left\") print(f\"Outcome: {playRound}", "0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column", "in [\"E\", \"G\"]: unit = next(unit for unit in units if unit.hitPoints >", "rowNumber, row in enumerate(cave): scores = \" \" for columnNumber, cell in enumerate(row):", "Get the dimensions of the puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight =", "'.', dtype=str) units = [] # Populate the cave and the list of", "int y: int race: str hitPoints: int = 200 attackDamage: int = 3", "candidate than one already found pathEnd = path[-1] if shortestPath is None or", "= sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\" if units[0].race == \"G\"", "< other.row or \\ self.row == other.row and self.column < other.column def openLocations(cave,", "be sorted. \"\"\" target = None for unit in units: if unit.hitPoints >", "if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \"", "dtype=str) units = [] # Populate the cave and the list of units.", "units]) survivingRace = \"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with", "arguments: units -- A list of all units. Does not need to be", "attack(self, cave, units): \"\"\" Attack an available enemy. 
units -- A list of", "if location.column + 1 < len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column", "available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row)", "\"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount = 0 survivingElfCount = 0", "or \\ unit.hitPoints == target.hitPoints and unit < target: target = unit if", "the list of units. with open(puzzleName, \"r\") as infile: for rowNumber, line in", "availableList = [] for unit in units: if unit.hitPoints > 0 and unit.race", "other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True if", "1 < len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1, location.row))", "if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave,", "return abs(self.x - other.x) + abs(self.y - other.y) def canAttack(self, units): \"\"\" Return", "if unit.hitPoints > 0 and unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit) return", "nodeToPath(solution) # We found a path. Now see if it's a better candidate", "playRound += 1 print(playRound) livingUnits = [unit for unit in units if unit.hitPoints", "in enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path =", "for unit in units if unit.hitPoints > 0] units = sorted(livingUnits) if __name__", "continue if not unit.enemyExists(units): finished = True break if not unit.canAttack(units): unit.move(cave, units)", "0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units =", "dimensions. 
cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] # Populate the", "True if this unit is at the same location as other \"\"\" return", "enemies: solution = bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution)", "and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath = path if shortestPath: cave[self.y,", "need to be sorted. cave -- The array representing the cave \"\"\" availableList", "list of all units. Does not need to be sorted. cave -- The", "loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in units if unit.race == \"E\"])", "< len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation =", "not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for unit in units", "= True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit", "== \"E\" and unit.hitPoints > 0]) if survivingElfCount < originalElfCount: finished = True", "units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True elfAttackPower = 3", "as np from generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace = \".\"", "numpy as np from generic_search import bfsCave, nodeToPath wall = \"#\" emptySpace =", "for columnNumber, cell in enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell,", "List[GridLocation]: \"\"\" Return a list of the open locations around the given location.", "row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column + 1", "path[-1] if shortestPath is None or len(path) < len(shortestPath) or \\ len(path) ==", "than one already found pathEnd = path[-1] if shortestPath is None or len(path)", "shortestPath is None or len(path) < len(shortestPath) or \\ len(path) == 
len(shortestPath) and", "200 attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if", "at this x,y location \"\"\" return self.x == x and self.y == y", "originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for", "need to be sorted. \"\"\" target = None for unit in units: if", "+= str(unit) + \" \" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower):", "-= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] = \".\" def printCave(cave, units,", "goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower)", "\".\": available.append(GridLocation(location.column - 1, location.row)) if location.column + 1 < len(row) and row[location.column", "to attack. Keyword arguments: units -- A list of all units. Does not", "unit in units: if unit.hitPoints > 0 and unit.race != self.race: return True", "units): \"\"\" Return a list of available enemies in the list Keyword arguments:", "= \"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total", "{hitPoints} total hit points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}\")", "def canAttack(self, units): \"\"\" Return True if there is an enemy available to", "elfAttackPower = 3 originalElfCount = 0 survivingElfCount = 0 while goblinsWin or survivingElfCount", "dataclasses import dataclass from typing import List, NamedTuple import numpy as np from", "cave and the list of units. with open(puzzleName, \"r\") as infile: for rowNumber,", "the given location. The locations are in reading order. 
\"\"\" available = []", "- other.x) + abs(self.y - other.y) def canAttack(self, units): \"\"\" Return True if", "so go to the second step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column", "shortestPath = path if shortestPath: cave[self.y, self.x] = '.' # The first step", "other -- The other unit. \"\"\" return abs(self.x - other.x) + abs(self.y -", "bfsCave(self.location(), enemy.location(), reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution) # We found", "if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units", "units): \"\"\" Return True if there is an enemy available to attack. Keyword", "None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] = \".\" def", "units = loadPuzzle(\"15.txt\", 3) finished = False playRound = 0 while not finished:", "self.x] = self.race def attack(self, cave, units): \"\"\" Attack an available enemy. units", "other): return self.x == other.x and self.y == other.y def location(self): return GridLocation(self.x,", "second step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y,", "\\ len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath =", "0 survivingElfCount = 0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1", "location.row + 1 < len(cave) and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column,", "units: if unit.hitPoints > 0 and unit.race != self.race: return True return False", "len([unit for unit in units if unit.race == \"E\" and unit.hitPoints > 0])", "elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units if __name__ == \"15a\": cave,", "for unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores +=", "location. The locations are in reading order. 
\"\"\" available = [] row =", "row in enumerate(cave): scores = \" \" for columnNumber, cell in enumerate(row): print(cell,", "target.x] = \".\" def printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave): scores", "puzzleHeight = 0 puzzleWidth = 0 for line in infile: puzzleHeight += 1", "for rowNumber, line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell in", "= 0 while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units", "< len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if", "step in the path is the current location so go to the second", "wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int row: int def", "return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column)", "def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1", "self.y != other.y: return self.y < other.y return self.x < other.x def __eq__(self,", "\".\": available.append(GridLocation(location.column + 1, location.row)) if location.row + 1 < len(cave) and cave[location.row", "Manhattan distance between this unit and other Keyword arguments: other -- The other", "> 0 and unit.race != self.race and self.distanceTo(unit) == 1: if target is", "the cave and the list of units. with open(puzzleName, \"r\") as infile: for", "if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of", "units. with open(puzzleName, \"r\") as infile: for rowNumber, line in enumerate(infile): for columnNumber,", "to be sorted. 
cave -- The array representing the cave \"\"\" availableList =", "- other.y) def canAttack(self, units): \"\"\" Return True if there is an enemy", "= unit if target is not None: target.hitPoints -= self.attackDamage if target.hitPoints <=", "units. Does not need to be sorted. \"\"\" for unit in units: if", "GridLocation = shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race", "finished = True break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not", "return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True if this unit is", "\" if len(scores.strip()): print(scores, end='') print() def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions", "> 0]) if survivingElfCount < originalElfCount: finished = True break if not finished:", "as infile: for rowNumber, line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if", "and self.y == y def distanceTo(self, other): \"\"\" Return the Manhattan distance between", "if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished = False playRound", "True break if not finished: playRound += 1 print(playRound) livingUnits = [unit for", "return cave, units if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished", "if unit.hitPoints > 0 and unit.race != self.race and self.distanceTo(unit) == 1: if", "GridLocation(NamedTuple): column: int row: int def __lt__(self, other): return self.row < other.row or", "= \".\" class GridLocation(NamedTuple): column: int row: int def __lt__(self, other): return self.row", "survivingRace = \"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints}", "available.append(GridLocation(location.column - 1, location.row)) if location.column + 1 < len(row) and row[location.column +", "elfAttackPower) originalElfCount = len([unit for unit in units if unit.race == \"E\"]) finished", "\"\"\" return 
self.x == x and self.y == y def distanceTo(self, other): \"\"\"", "dimensions of the puzzle. with open(puzzleName, \"r\") as infile: puzzleHeight = 0 puzzleWidth", "of the open locations around the given location. The locations are in reading", "showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit in", "list Keyword arguments: units -- A list of all units. Does not need", "+= 1 cave, units = loadPuzzle(\"15.txt\", elfAttackPower) originalElfCount = len([unit for unit in", "x,y location \"\"\" return self.x == x and self.y == y def distanceTo(self,", "import bfsCave, nodeToPath wall = \"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int", "with open(puzzleName, \"r\") as infile: for rowNumber, line in enumerate(infile): for columnNumber, cell", "units. Does not need to be sorted. \"\"\" target = None for unit", "need to be available for attack. Keyword arguments: units -- A list of", "> 0 and unit.race != self.race: return True return False def availableEnemies(self, cave,", "cave, units = loadPuzzle(\"15.txt\", 3) finished = False playRound = 0 while not", "\"#\" emptySpace = \".\" class GridLocation(NamedTuple): column: int row: int def __lt__(self, other):", "def printCave(cave, units, showScores=False): for rowNumber, row in enumerate(cave): scores = \" \"", "1)) if location.column > 0 and row[location.column - 1] == \".\": available.append(GridLocation(location.column -", "self.distanceTo(unit) == 1: return True return False def enemyExists(self, units): \"\"\" Return True", "attackDamage: int = 3 def __str__(self): return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y", "return f\"{self.race}({self.hitPoints})\" def __lt__(self, other): if self.y != other.y: return self.y < other.y", "self.y == y def distanceTo(self, other): \"\"\" Return the Manhattan distance between this", "= pathEnd shortestPath = path if shortestPath: cave[self.y, self.x] = '.' 
# The", "self.race and self.distanceTo(unit) == 1: return True return False def enemyExists(self, units): \"\"\"", "NamedTuple import numpy as np from generic_search import bfsCave, nodeToPath wall = \"#\"", "= None shortestPath = None enemies = self.availableEnemies(cave, units) for enemy in enemies:", "target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit < target: target = unit", "= sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount", "if it's a better candidate than one already found pathEnd = path[-1] if", "playRound = 0 while not finished: for unit in units: if unit.hitPoints <=", "left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}\") print(f\"Elf attack power: {elfAttackPower}\")", "for unit in units: if unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished", "other.x and self.y == other.y def atLocation(self, x, y): \"\"\" Return True if", "reachedLocation, cave, openLocations) if solution: path = nodeToPath(solution) # We found a path.", "originalElfCount: finished = True break if not finished: playRound += 1 print(playRound) livingUnits", "1: if target is None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints ==", "- goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit: x: int", "False def availableEnemies(self, cave, units): \"\"\" Return a list of available enemies in", "1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave with the determined puzzle", "None enemies = self.availableEnemies(cave, units) for enemy in enemies: solution = bfsCave(self.location(), enemy.location(),", "= max(puzzleWidth, len(line.rstrip())) # Create the cave with the determined puzzle dimensions. 
cave", "\"\"\" target = None for unit in units: if unit.hitPoints > 0 and", "sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount =", "cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column >", "location.row - 1)) if location.column > 0 and row[location.column - 1] == \".\":", "and other Keyword arguments: other -- The other unit. \"\"\" return abs(self.x -", "and unit.race != self.race and self.distanceTo(unit) == 1: return True return False def", "rowNumber)) scores += str(unit) + \" \" if len(scores.strip()): print(scores, end='') print() def", "def enemyExists(self, units): \"\"\" Return True if an enemy exists. The enemy does", "if showScores and cell in [\"E\", \"G\"]: unit = next(unit for unit in", "path if shortestPath: cave[self.y, self.x] = '.' # The first step in the", "available enemies in the list Keyword arguments: units -- A list of all", "units -- A list of all units. Does not need to be sorted.", "hitPoints = sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\" if units[0].race ==", "target = None for unit in units: if unit.hitPoints > 0 and unit.race", "infile: for rowNumber, line in enumerate(infile): for columnNumber, cell in enumerate(line.rstrip()): if cell", "cell in enumerate(row): print(cell, end='') if showScores and cell in [\"E\", \"G\"]: unit", "= '.' # The first step in the path is the current location", "availableList def move(self, cave, units) -> None: targetLocation: GridLocation = None shortestPath =", "if solution: path = nodeToPath(solution) # We found a path. 
Now see if", "== other.x and self.y == other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self,", "+ 1, location.column] == \".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation,", "unit.attack(cave, units) if not finished: playRound += 1 print(playRound) livingUnits = [unit for", "or unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit < target:", "nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x] =", "cave[self.y, self.x] = self.race def attack(self, cave, units): \"\"\" Attack an available enemy.", "0 and unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self,", "'.' # The first step in the path is the current location so", "finished = True break if not finished: playRound += 1 print(playRound) livingUnits =", "for unit in units if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin =", "= cave[location.row] if location.row > 0 and cave[location.row - 1, location.column] == \".\":", "The other unit. \"\"\" return abs(self.x - other.x) + abs(self.y - other.y) def", "availableList.append(unit) return availableList def move(self, cave, units) -> None: targetLocation: GridLocation = None", "= 0 while not finished: for unit in units: if unit.hitPoints <= 0:", "enemies in the list Keyword arguments: units -- A list of all units.", "return True return False def availableEnemies(self, cave, units): \"\"\" Return a list of", "== 1: if target is None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints", "[] # Populate the cave and the list of units. 
with open(puzzleName, \"r\")", "1 print(playRound) livingUnits = [unit for unit in units if unit.hitPoints > 0]", "int def __lt__(self, other): return self.row < other.row or \\ self.row == other.row", "!= self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList def move(self, cave, units) ->", "None shortestPath = None enemies = self.availableEnemies(cave, units) for enemy in enemies: solution", "unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished: playRound += 1 print(playRound) livingUnits", "y: int race: str hitPoints: int = 200 attackDamage: int = 3 def", "1, location.row)) if location.column + 1 < len(row) and row[location.column + 1] ==", "Return True if this unit is at the same location as other \"\"\"", "\".\": available.append(GridLocation(location.column, location.row + 1)) return sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row -", "\" for columnNumber, cell in enumerate(row): print(cell, end='') if showScores and cell in", "availableEnemies(self, cave, units): \"\"\" Return a list of available enemies in the list", "return self.y < other.y return self.x < other.x def __eq__(self, other): return self.x", "= True break if not finished: playRound += 1 print(playRound) livingUnits = [unit", "== \"G\" else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points left\") print(f\"Outcome:", "for rowNumber, row in enumerate(cave): scores = \" \" for columnNumber, cell in", "if an enemy exists. 
The enemy does not need to be available for", "openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return a list of the open locations", "available.append(GridLocation(location.column + 1, location.row)) if location.row + 1 < len(cave) and cave[location.row +", "= units[0].race == \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full rounds\")", "== other.y def atLocation(self, x, y): \"\"\" Return True if this unit is", "== \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row + 1 < len(cave) and", "target is not None: target.hitPoints -= self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x]", "other.y return self.x < other.x def __eq__(self, other): return self.x == other.x and", "> 0] units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True elfAttackPower", "not need to be available for attack. Keyword arguments: units -- A list", "if unit.hitPoints <= 0: continue if not unit.enemyExists(units): finished = True break if", "next(unit for unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores", "in units if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin = units[0].race ==", "1 < len(cave) and cave[location.row + 1, location.column] == \".\": available.append(GridLocation(location.column, location.row +", "if this unit is at the same location as other \"\"\" return self.x", "units = [] # Populate the cave and the list of units. 
with", "1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1)) if location.column > 0 and", "== y def distanceTo(self, other): \"\"\" Return the Manhattan distance between this unit", "None or unit.hitPoints < target.hitPoints or \\ unit.hitPoints == target.hitPoints and unit <", "sameLocation(self, other): \"\"\" Return True if this unit is at the same location", "not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished: playRound += 1 print(playRound)", "0]) if survivingElfCount < originalElfCount: finished = True break if not finished: playRound", "found pathEnd = path[-1] if shortestPath is None or len(path) < len(shortestPath) or", "all units. Does not need to be sorted. \"\"\" target = None for", "location.column + 1 < len(row) and row[location.column + 1] == \".\": available.append(GridLocation(location.column +", "cave, units if __name__ == \"15a\": cave, units = loadPuzzle(\"15.txt\", 3) finished =", "+ abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit: x: int y: int", "1, location.row)) if location.row + 1 < len(cave) and cave[location.row + 1, location.column]", "goalLocation.column) == 1 @dataclass class Unit: x: int y: int race: str hitPoints:", "Keyword arguments: other -- The other unit. \"\"\" return abs(self.x - other.x) +", "units, showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints = sum([unit.hitPoints for unit", "full rounds\") hitPoints = sum([unit.hitPoints for unit in units]) survivingRace = \"Goblins\" if", "abs(self.y - other.y) def canAttack(self, units): \"\"\" Return True if there is an", "# Create the cave with the determined puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth),", "the cave \"\"\" availableList = [] for unit in units: if unit.hitPoints >", "not need to be sorted. 
\"\"\" for unit in units: if unit.hitPoints >", "> 0 and cave[location.row - 1, location.column] == \".\": available.append(GridLocation(location.column, location.row - 1))", "The array representing the cave \"\"\" availableList = [] for unit in units:", "sorted(available) def reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) ==", "= [unit for unit in units if unit.hitPoints > 0] units = sorted(livingUnits)", "x, y): \"\"\" Return True if this unit is at this x,y location", "of available enemies in the list Keyword arguments: units -- A list of", "determined puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] #", "unit.move(cave, units) unit.attack(cave, units) if not finished: playRound += 1 print(playRound) livingUnits =", "if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) survivingElfCount = len([unit for unit in", "def loadPuzzle(puzzleName, elfAttackPower): # Get the dimensions of the puzzle. with open(puzzleName, \"r\")", "3 originalElfCount = 0 survivingElfCount = 0 while goblinsWin or survivingElfCount < originalElfCount:", "== \"G\" printCave(cave, units, showScores=True) print(f\"Combat ends after {playRound} full rounds\") hitPoints =", "to be available for attack. Keyword arguments: units -- A list of all", "go to the second step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y", "-- A list of all units. Does not need to be sorted. 
\"\"\"", "while goblinsWin or survivingElfCount < originalElfCount: elfAttackPower += 1 cave, units = loadPuzzle(\"15.txt\",", "units if unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin = units[0].race == \"G\"", "dataclass from typing import List, NamedTuple import numpy as np from generic_search import", "as infile: puzzleHeight = 0 puzzleWidth = 0 for line in infile: puzzleHeight", "\" \" for columnNumber, cell in enumerate(row): print(cell, end='') if showScores and cell", "= shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row cave[self.y, self.x] = self.race def", "break if not unit.canAttack(units): unit.move(cave, units) unit.attack(cave, units) if not finished: playRound +=", "= 0 for line in infile: puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip()))", "self.attackDamage if target.hitPoints <= 0: cave[target.y, target.x] = \".\" def printCave(cave, units, showScores=False):", "cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber] = cell return cave, units if", "return self.row < other.row or \\ self.row == other.row and self.column < other.column", "else \"Elves\" print(f\"{survivingRace} win with {hitPoints} total hit points left\") print(f\"Outcome: {playRound} *", "cave[rowNumber, columnNumber] = cell return cave, units if __name__ == \"15a\": cave, units", "and row[location.column - 1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column +", "\"\"\" Return a list of the open locations around the given location. The", "sorted. cave -- The array representing the cave \"\"\" availableList = [] for", "def __eq__(self, other): return self.x == other.x and self.y == other.y def location(self):", "= len([unit for unit in units if unit.race == \"E\" and unit.hitPoints >", "be available for attack. Keyword arguments: units -- A list of all units.", "return False def enemyExists(self, units): \"\"\" Return True if an enemy exists. 
The", "= [] # Populate the cave and the list of units. with open(puzzleName,", "at the same location as other \"\"\" return self.x == other.x and self.y", "\"\"\" Return True if an enemy exists. The enemy does not need to", "unit in units: if unit.hitPoints > 0 and unit.race != self.race and openLocations(cave,", "open(puzzleName, \"r\") as infile: for rowNumber, line in enumerate(infile): for columnNumber, cell in", "unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \" if len(scores.strip()): print(scores, end='') print()", "\"G\"]: unit = next(unit for unit in units if unit.hitPoints > 0 and", "== other.y def location(self): return GridLocation(self.x, self.y) def sameLocation(self, other): \"\"\" Return True", "it's a better candidate than one already found pathEnd = path[-1] if shortestPath", "points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}\") print(f\"Elf attack power:", "for unit in units if unit.race == \"E\"]) finished = False playRound =", "other.column def openLocations(cave, location: GridLocation) -> List[GridLocation]: \"\"\" Return a list of the", "other unit. \"\"\" return abs(self.x - other.x) + abs(self.y - other.y) def canAttack(self,", "\"\"\" Return True if there is an enemy available to attack. Keyword arguments:", "found a path. Now see if it's a better candidate than one already", "enumerate(line.rstrip()): if cell in ['E', 'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell ==", "unit.hitPoints > 0] units = sorted(livingUnits) goblinsWin = units[0].race == \"G\" printCave(cave, units,", "unit in units if unit.hitPoints > 0] units = sorted(livingUnits) if __name__ ==", "be sorted. 
cave -- The array representing the cave \"\"\" availableList = []", "hit points left\") print(f\"Outcome: {playRound} * {hitPoints} = {playRound * hitPoints}\") print(f\"Elf attack", "x: int y: int race: str hitPoints: int = 200 attackDamage: int =", "import List, NamedTuple import numpy as np from generic_search import bfsCave, nodeToPath wall", "if survivingElfCount < originalElfCount: finished = True break if not finished: playRound +=", "race: str hitPoints: int = 200 attackDamage: int = 3 def __str__(self): return", "target: target = unit if target is not None: target.hitPoints -= self.attackDamage if", "== len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd shortestPath = path if", "to be sorted. \"\"\" target = None for unit in units: if unit.hitPoints", "int race: str hitPoints: int = 200 attackDamage: int = 3 def __str__(self):", "path. Now see if it's a better candidate than one already found pathEnd", "(pathEnd < targetLocation): targetLocation = pathEnd shortestPath = path if shortestPath: cave[self.y, self.x]", "< target: target = unit if target is not None: target.hitPoints -= self.attackDamage", "break if not finished: playRound += 1 print(playRound) livingUnits = [unit for unit", "enumerate(cave): scores = \" \" for columnNumber, cell in enumerate(row): print(cell, end='') if", "pathEnd shortestPath = path if shortestPath: cave[self.y, self.x] = '.' # The first", "= None enemies = self.availableEnemies(cave, units) for enemy in enemies: solution = bfsCave(self.location(),", "reachedLocation(currentLocation, goalLocation): return abs(currentLocation.row - goalLocation.row) + abs(currentLocation.column - goalLocation.column) == 1 @dataclass", "The first step in the path is the current location so go to", "location.row)) if location.row + 1 < len(cave) and cave[location.row + 1, location.column] ==", "of units. 
with open(puzzleName, \"r\") as infile: for rowNumber, line in enumerate(infile): for", "__eq__(self, other): return self.x == other.x and self.y == other.y def location(self): return", "unit.hitPoints > 0 and unit.race != self.race and openLocations(cave, unit.location()): availableList.append(unit) return availableList", "return True return False def enemyExists(self, units): \"\"\" Return True if an enemy", "first step in the path is the current location so go to the", "0] units = sorted(livingUnits) if __name__ == \"__main__\": goblinsWin = True elfAttackPower =", "canAttack(self, units): \"\"\" Return True if there is an enemy available to attack.", "\\ unit.hitPoints == target.hitPoints and unit < target: target = unit if target", "= 0 puzzleWidth = 0 for line in infile: puzzleHeight += 1 puzzleWidth", "True if this unit is at this x,y location \"\"\" return self.x ==", "'G']: units.append(Unit(columnNumber, rowNumber, cell, attackDamage=3 if cell == 'G' else elfAttackPower)) cave[rowNumber, columnNumber]", "in reading order. \"\"\" available = [] row = cave[location.row] if location.row >", "unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit) + \" \" if", "representing the cave \"\"\" availableList = [] for unit in units: if unit.hitPoints", "solution: path = nodeToPath(solution) # We found a path. Now see if it's", "= True elfAttackPower = 3 originalElfCount = 0 survivingElfCount = 0 while goblinsWin", "the second step nextLocation: GridLocation = shortestPath[1] self.x = nextLocation.column self.y = nextLocation.row", "Return a list of the open locations around the given location. 
The locations", "unit < target: target = unit if target is not None: target.hitPoints -=", "\"\"\" Return the Manhattan distance between this unit and other Keyword arguments: other", "unit in units if unit.hitPoints > 0 and unit.atLocation(columnNumber, rowNumber)) scores += str(unit)", "== other.x and self.y == other.y def atLocation(self, x, y): \"\"\" Return True", "len(shortestPath) or \\ len(path) == len(shortestPath) and (pathEnd < targetLocation): targetLocation = pathEnd", "one already found pathEnd = path[-1] if shortestPath is None or len(path) <", "== \"__main__\": goblinsWin = True elfAttackPower = 3 originalElfCount = 0 survivingElfCount =", "available = [] row = cave[location.row] if location.row > 0 and cave[location.row -", "puzzleHeight += 1 puzzleWidth = max(puzzleWidth, len(line.rstrip())) # Create the cave with the", "Return True if this unit is at this x,y location \"\"\" return self.x", "\\ self.row == other.row and self.column < other.column def openLocations(cave, location: GridLocation) ->", "between this unit and other Keyword arguments: other -- The other unit. \"\"\"", "Does not need to be sorted. \"\"\" target = None for unit in", "puzzle dimensions. cave = np.full((puzzleHeight, puzzleWidth), '.', dtype=str) units = [] # Populate", "units): \"\"\" Attack an available enemy. units -- A list of all units.", "- 1] == \".\": available.append(GridLocation(location.column - 1, location.row)) if location.column + 1 <", "abs(currentLocation.column - goalLocation.column) == 1 @dataclass class Unit: x: int y: int race:", "unit in units]) survivingRace = \"Goblins\" if units[0].race == \"G\" else \"Elves\" print(f\"{survivingRace}", "row[location.column + 1] == \".\": available.append(GridLocation(location.column + 1, location.row)) if location.row + 1", "\"r\") as infile: puzzleHeight = 0 puzzleWidth = 0 for line in infile:" ]
[ "Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\",", "Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The", "\"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\",", "import time, timedelta # Will print all read events to stdout. DEBUG =", "Will print all read events to stdout. DEBUG = False DATA_PATH = \"~/.tourney\"", "\"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX =", "WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA", "\"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The", "MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\",", "\"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\",", "PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous", "\"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple", "from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\",", "TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking", "stdout. 
DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5", "= \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA =", "= timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\",", "= 0.5 # seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX", "\"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0 #", "\"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES", "timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\",", "Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\",", "Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The League of Ordinary", "timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\"", "Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\",", "of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons", "50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\",", "# Will print all read events to stdout. 
DEBUG = False DATA_PATH =", "Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme Team\", \"The Mullet Mafia\",", "5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX", "RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\"", "seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX", "\"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\",", "DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 #", "Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One", "\"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious", "= \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0", "\"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire", "Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\",", "\"The Abusement Park\", \"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme", "= timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA", "\"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\",", "off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The League", "\"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) 
MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11)", "seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\"", "MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE", "False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY", "= \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX", "= [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\",", "Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\",", "\"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The", "] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [", "Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique", "time, timedelta # Will print all read events to stdout. DEBUG = False", "timedelta # Will print all read events to stdout. 
DEBUG = False DATA_PATH", "Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of", "\"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme Team\", \"The Mullet", "SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE", "DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY =", "REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10)", "[ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS =", "RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX =", "\"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our", "\"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\",", "Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured", "= timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\",", "= time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS", "REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [", "[\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated", "= time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS 
= [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\",", "\"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"]", "for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement", "read events to stdout. DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\"", "all read events to stdout. DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME =", "\"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"]", "REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA", "the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty", "\"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES =", "\"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The", "= False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds", "\"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\",", "POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ]", "Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms", "Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from", "\"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal Killers\",", "Hit 
Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\",", "= 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\"", "COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE", "\"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit", "\"Injured Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the", "# seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX =", "Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\",", "= [ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber", "[ \"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\",", "\"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight", "print all read events to stdout. 
DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME", "Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off", "= \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE =", "\"Air Farce\", \"Cereal Killers\", \"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game", "Reserve\", \"One Hit Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\",", "0.5 # seconds RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX =", "Wonders\", \"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid", "\"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\",", "\"The League of Ordinary Gentlemen\", \"The Meme Team\", \"The Mullet Mafia\", \"Thunderpants\", ]", "MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50)", "\"Dangerous Dynamos\", \"Designated Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\",", "Abusement Park\", \"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme Team\",", "from datetime import time, timedelta # Will print all read events to stdout.", "\"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS", "= \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA =", "\"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\",", 
"datetime import time, timedelta # Will print all read events to stdout. DEBUG", "to stdout. DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY =", "events to stdout. DEBUG = False DATA_PATH = \"~/.tourney\" CHANNEL_NAME = \"foosball\" RTM_READ_DELAY", "time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11,", "time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\",", "Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging", "= [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\",", "\"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49)", "the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming Flamingos\", \"The League of", "\"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the", "= [ \"+1\", \"the_horns\", \"metal\", \"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS", "\"Smarty Pints\", \"Straight off the Couch\", \"Tenacious Turtles\", \"The Abusement Park\", \"The Flaming", "CHANNEL_NAME = \"foosball\" RTM_READ_DELAY = 0.5 # seconds RECONNECT_DELAY = 5.0 # seconds", "NEGATIVE_REACTIONS = [\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air", "= time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE =", "Park\", \"The Flaming Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme Team\", \"The", "# seconds 
RECONNECT_DELAY = 5.0 # seconds COMMAND_REGEX = \"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\"", "\"Our Uniforms Match\", \"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\",", "timedelta(hours=1) REMINDER_ANNOUNCE = time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA =", "MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS = [ \"+1\", \"the_horns\", \"metal\",", "\"!(\\\\w+)\\\\s*(.*)\" REACTION_REGEX = \":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9)", "\"Pique Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe", "Flamingos\", \"The League of Ordinary Gentlemen\", \"The Meme Team\", \"The Mullet Mafia\", \"Thunderpants\",", "Blinders\", \"Pistons from the Past\", \"Purple Cobras\", \"Rabid Squirrels\", \"Raging Nightmare\", \"Recipe for", "time(11) REMINDER_ANNOUNCE_DELTA = timedelta(minutes=49) MIDDAY_ANNOUNCE = time(11, 50) MIDDAY_ANNOUNCE_DELTA = timedelta(minutes=10) POSITIVE_REACTIONS =", "\":(.+):\" SCORE_ARGS_REGEX = \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1)", "[\"-1\", \"middle_finger\"] PRIVILEGED_COMMANDS = [\"undoteams\", \"generate\", \"autoupdate\"] TEAM_NAMES = [ \"Air Farce\", \"Cereal", "Squirrels\", \"Raging Nightmare\", \"Recipe for Disaster\", \"Shockwave\", \"Smarty Pints\", \"Straight off the Couch\",", "= \"(T\\\\d+)\\\\s+(\\\\d+)\\\\s+(T\\\\d+)\\\\s+(\\\\d+)\" WIN_ARGS_REGEX = \"(\\\\d+)\\\\s+(\\\\d+)\" MORNING_ANNOUNCE = time(9) MORNING_ANNOUNCE_DELTA = timedelta(hours=1) REMINDER_ANNOUNCE =", "\"raised_hands\", \"ok\", \"ok_hand\", \"fire\", \"tada\", \"confetti_ball\" ] NEGATIVE_REACTIONS = [\"-1\", 
\"middle_finger\"] PRIVILEGED_COMMANDS =", "Drinkers\", \"Fire Breaking Rubber Duckies\", \"Game of Throw-ins\", \"Injured Reserve\", \"One Hit Wonders\"," ]
[ "current_day = 9 if user_month >= current_month: birth_month_count = user_month - current_month else:", "your birth month (number): \") user_day = raw_input(\"Enter your birth day (number): \")", "Name: # Date: # proj01: A Simple Program # Part I: # This", "raw_input(\"Enter your grade: \") # grad_year = 12 - int(user_grade) # print user_name", "elif user_dog_count > 0 and user_dog_count <= 3: print \"Good for you!\" else:", "print \"you can watch G, PG and PG-13 rated movies\" else: print \"you", "18: print \"you can watch G, PG and PG-13 rated movies\" else: print", "print user_name + \", you will graduate from high school in\", grad_year, \"years!\"", "# user_grade = raw_input(\"Enter your grade: \") # grad_year = 12 - int(user_grade)", "= user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your birth month", "birth day (number): \") user_month = int(user_month) user_day = int(user_day) current_month = 7", "\"you can watch G, PG, PG-13 and R rated movies\" user_dog_count = raw_input(\"How", "the number of days and months until their birthday # user_name_gram = user_name[0:1].upper()", "\"you can watch G, PG and PG-13 rated movies\" else: print \"you can", "(number): \") user_month = int(user_month) user_day = int(user_day) current_month = 7 current_day =", "user_age < 13: print \"you can watch G and PG rated movies\" elif", ">= current_month: birth_month_count = user_month - current_month else: birth_month_count = 12 - (current_month", "proj01: A Simple Program # Part I: # This program asks the user", "and user_age < 13: print \"you can watch G and PG rated movies\"", "= int(user_dog_count) if user_dog_count == 0: print \"I suggest you get a dog,", "+ user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your birth month (number): \")", "< 18: print \"you can watch G, PG and PG-13 rated movies\" else:", "(user_day - current_day) birth_month_count = birth_month_count - 1 print user_name + 
\", your", "Date: # proj01: A Simple Program # Part I: # This program asks", "age: \") user_age = int(user_age) if user_age <= 7: print \"you can only", "user_day = raw_input(\"Enter your birth day (number): \") user_month = int(user_month) user_day =", "= raw_input(\"Enter your grade: \") # grad_year = 12 - int(user_grade) # print", "user_age = int(user_age) if user_age <= 7: print \"you can only watch G", "user_age > 7 and user_age < 13: print \"you can watch G and", "print \"you can watch G, PG, PG-13 and R rated movies\" user_dog_count =", "program asks the user for his/her name and grade. #Then, it prints out", "user_grade = raw_input(\"Enter your grade: \") # grad_year = 12 - int(user_grade) #", "= raw_input(\"Enter your age: \") user_age = int(user_age) if user_age <= 7: print", "# Part I: # This program asks the user for his/her name and", "raw_input(\"Enter your birth day (number): \") user_month = int(user_month) user_day = int(user_day) current_month", "elif user_age > 7 and user_age < 13: print \"you can watch G", "user_name = raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter your grade: \")", "for his/her name and grade. #Then, it prints out a sentence that says", "- 1 print user_name + \", your birthday is in \", birth_month_count, \"months", "movies\" elif user_age > 7 and user_age < 13: print \"you can watch", "is in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age:", "7: print \"you can only watch G rated movies\" elif user_age > 7", "birthday is in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your", "7 and user_age < 13: print \"you can watch G and PG rated", "out a sentence that says the number of years until they graduate. 
#var", "birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \") user_age = int(user_age) if user_age", "user_age >= 13 and user_age < 18: print \"you can watch G, PG", "raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter your grade: \") # grad_year", "user_name + \", you will graduate from high school in\", grad_year, \"years!\" #", "int(user_month) user_day = int(user_day) current_month = 7 current_day = 9 if user_month >=", "from high school in\", grad_year, \"years!\" # # Part II: # # This", "if user_dog_count == 0: print \"I suggest you get a dog, they are", "days and months until their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() #", "day (number): \") user_month = int(user_month) user_day = int(user_day) current_month = 7 current_day", "G rated movies\" elif user_age > 7 and user_age < 13: print \"you", "and months until their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print", "PG, PG-13 and R rated movies\" user_dog_count = raw_input(\"How many dogs do you", "\"Wow that's a lot of dogs!\" # If you complete extensions, describe your", "\"I suggest you get a dog, they are really fun!\" elif user_dog_count >", "G, PG and PG-13 rated movies\" else: print \"you can watch G, PG,", "birth_month_count - 1 print user_name + \", your birthday is in \", birth_month_count,", "= int(user_age) if user_age <= 7: print \"you can only watch G rated", "user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your birth month (number):", "graduate. 
#var name user_name = raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter", "name: \") # user_grade = raw_input(\"Enter your grade: \") # grad_year = 12", "= user_day - current_day else: birth_day_count = 30 - (user_day - current_day) birth_month_count", "+ \", you will graduate from high school in\", grad_year, \"years!\" # #", "movies\" elif user_age >= 13 and user_age < 18: print \"you can watch", "This program asks the user for his/her name and grade. #Then, it prints", "they graduate. #var name user_name = raw_input(\"Enter your name: \") # user_grade =", "says the number of years until they graduate. #var name user_name = raw_input(\"Enter", "birth month (number): \") user_day = raw_input(\"Enter your birth day (number): \") user_month", "== 0: print \"I suggest you get a dog, they are really fun!\"", "fun!\" elif user_dog_count > 0 and user_dog_count <= 3: print \"Good for you!\"", "can watch G and PG rated movies\" elif user_age >= 13 and user_age", "suggest you get a dog, they are really fun!\" elif user_dog_count > 0", "- current_day) birth_month_count = birth_month_count - 1 print user_name + \", your birthday", "0 and user_dog_count <= 3: print \"Good for you!\" else: print \"Wow that's", "PG and PG-13 rated movies\" else: print \"you can watch G, PG, PG-13", "watch G, PG and PG-13 rated movies\" else: print \"you can watch G,", "and R rated movies\" user_dog_count = raw_input(\"How many dogs do you have?: \")", "# proj01: A Simple Program # Part I: # This program asks the", "rated movies\" else: print \"you can watch G, PG, PG-13 and R rated", "current_month: birth_month_count = user_month - current_month else: birth_month_count = 12 - (current_month -", "it prints a sentence that says the number of days and months until", "are really fun!\" elif user_dog_count > 0 and user_dog_count <= 3: print \"Good", "your birth day (number): \") user_month = int(user_month) user_day = int(user_day) current_month =", "print \"you can watch G and PG 
rated movies\" elif user_age >= 13", "# print user_name_gram user_month = raw_input(\"Enter your birth month (number): \") user_day =", "user_dog_count = int(user_dog_count) if user_dog_count == 0: print \"I suggest you get a", "of days and months until their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower()", "print \"Good for you!\" else: print \"Wow that's a lot of dogs!\" #", "if user_day >= current_day: birth_day_count = user_day - current_day else: birth_day_count = 30", "user_month - current_month else: birth_month_count = 12 - (current_month - user_month) if user_day", "= birth_month_count - 1 print user_name + \", your birthday is in \",", "# grad_year = 12 - int(user_grade) # print user_name + \", you will", "your birthday is in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter", "get a dog, they are really fun!\" elif user_dog_count > 0 and user_dog_count", "\", your birthday is in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age =", "current_day else: birth_day_count = 30 - (user_day - current_day) birth_month_count = birth_month_count -", "and user_age < 18: print \"you can watch G, PG and PG-13 rated", "and user_dog_count <= 3: print \"Good for you!\" else: print \"Wow that's a", "#var name user_name = raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter your", "rated movies\" elif user_age >= 13 and user_age < 18: print \"you can", "# print user_name + \", you will graduate from high school in\", grad_year,", "print user_name_gram user_month = raw_input(\"Enter your birth month (number): \") user_day = raw_input(\"Enter", "movies\" else: print \"you can watch G, PG, PG-13 and R rated movies\"", "and PG-13 rated movies\" else: print \"you can watch G, PG, PG-13 and", "#Then, it prints out a sentence that says the number of years until", "R rated movies\" user_dog_count = raw_input(\"How many dogs do you have?: \") user_dog_count", "your grade: 
\") # grad_year = 12 - int(user_grade) # print user_name +", "int(user_grade) # print user_name + \", you will graduate from high school in\",", "= 12 - (current_month - user_month) if user_day >= current_day: birth_day_count = user_day", "13: print \"you can watch G and PG rated movies\" elif user_age >=", "\"you can watch G and PG rated movies\" elif user_age >= 13 and", "years until they graduate. #var name user_name = raw_input(\"Enter your name: \") #", "\") user_month = int(user_month) user_day = int(user_day) current_month = 7 current_day = 9", "= 7 current_day = 9 if user_month >= current_month: birth_month_count = user_month -", "user_dog_count == 0: print \"I suggest you get a dog, they are really", "a sentence that says the number of days and months until their birthday", "print user_name + \", your birthday is in \", birth_month_count, \"months and\", birth_day_count,", "months until their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram", "= 30 - (user_day - current_day) birth_month_count = birth_month_count - 1 print user_name", "user_dog_count > 0 and user_dog_count <= 3: print \"Good for you!\" else: print", "12 - (current_month - user_month) if user_day >= current_day: birth_day_count = user_day -", "- current_month else: birth_month_count = 12 - (current_month - user_month) if user_day >=", "user for his/her name and grade. #Then, it prints out a sentence that", "that says the number of years until they graduate. #var name user_name =", "birth month. 
# # Then, it prints a sentence that says the number", "= raw_input(\"Enter your birth day (number): \") user_month = int(user_month) user_day = int(user_day)", "can only watch G rated movies\" elif user_age > 7 and user_age <", "int(user_dog_count) if user_dog_count == 0: print \"I suggest you get a dog, they", "in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \")", "This program asks the user for his/her name and birth month. # #", "asks the user for his/her name and grade. #Then, it prints out a", "user for his/her name and birth month. # # Then, it prints a", "3: print \"Good for you!\" else: print \"Wow that's a lot of dogs!\"", "current_day) birth_month_count = birth_month_count - 1 print user_name + \", your birthday is", "# This program asks the user for his/her name and birth month. #", "PG rated movies\" elif user_age >= 13 and user_age < 18: print \"you", "# Name: # Date: # proj01: A Simple Program # Part I: #", "birth_month_count = user_month - current_month else: birth_month_count = 12 - (current_month - user_month)", "until they graduate. 
#var name user_name = raw_input(\"Enter your name: \") # user_grade", "G and PG rated movies\" elif user_age >= 13 and user_age < 18:", "print \"I suggest you get a dog, they are really fun!\" elif user_dog_count", "that's a lot of dogs!\" # If you complete extensions, describe your extensions", "= raw_input(\"How many dogs do you have?: \") user_dog_count = int(user_dog_count) if user_dog_count", "watch G rated movies\" elif user_age > 7 and user_age < 13: print", "= raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter your grade: \") #", "current_month = 7 current_day = 9 if user_month >= current_month: birth_month_count = user_month", "until their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month", "user_age = raw_input(\"Enter your age: \") user_age = int(user_age) if user_age <= 7:", "- current_day else: birth_day_count = 30 - (user_day - current_day) birth_month_count = birth_month_count", "a sentence that says the number of years until they graduate. #var name", "prints a sentence that says the number of days and months until their", "current_day: birth_day_count = user_day - current_day else: birth_day_count = 30 - (user_day -", "dogs do you have?: \") user_dog_count = int(user_dog_count) if user_dog_count == 0: print", "the number of years until they graduate. 
#var name user_name = raw_input(\"Enter your", "7 current_day = 9 if user_month >= current_month: birth_month_count = user_month - current_month", "if user_month >= current_month: birth_month_count = user_month - current_month else: birth_month_count = 12", "A Simple Program # Part I: # This program asks the user for", "raw_input(\"Enter your birth month (number): \") user_day = raw_input(\"Enter your birth day (number):", "you!\" else: print \"Wow that's a lot of dogs!\" # If you complete", "month (number): \") user_day = raw_input(\"Enter your birth day (number): \") user_month =", "PG-13 and R rated movies\" user_dog_count = raw_input(\"How many dogs do you have?:", "can watch G, PG and PG-13 rated movies\" else: print \"you can watch", "many dogs do you have?: \") user_dog_count = int(user_dog_count) if user_dog_count == 0:", "= raw_input(\"Enter your birth month (number): \") user_day = raw_input(\"Enter your birth day", "for his/her name and birth month. # # Then, it prints a sentence", "> 7 and user_age < 13: print \"you can watch G and PG", "\"you can only watch G rated movies\" elif user_age > 7 and user_age", "birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter", "= 12 - int(user_grade) # print user_name + \", you will graduate from", ">= current_day: birth_day_count = user_day - current_day else: birth_day_count = 30 - (user_day", "user_name + \", your birthday is in \", birth_month_count, \"months and\", birth_day_count, \"days!\"", "\") user_day = raw_input(\"Enter your birth day (number): \") user_month = int(user_month) user_day", "in\", grad_year, \"years!\" # # Part II: # # This program asks the", "birth_month_count = 12 - (current_month - user_month) if user_day >= current_day: birth_day_count =", "his/her name and grade. 
#Then, it prints out a sentence that says the", "# # This program asks the user for his/her name and birth month.", "birth_month_count = birth_month_count - 1 print user_name + \", your birthday is in", "watch G, PG, PG-13 and R rated movies\" user_dog_count = raw_input(\"How many dogs", "PG-13 rated movies\" else: print \"you can watch G, PG, PG-13 and R", "print \"Wow that's a lot of dogs!\" # If you complete extensions, describe", "user_day >= current_day: birth_day_count = user_day - current_day else: birth_day_count = 30 -", "Simple Program # Part I: # This program asks the user for his/her", "Then, it prints a sentence that says the number of days and months", "\") # grad_year = 12 - int(user_grade) # print user_name + \", you", "else: birth_day_count = 30 - (user_day - current_day) birth_month_count = birth_month_count - 1", "else: print \"Wow that's a lot of dogs!\" # If you complete extensions,", "you get a dog, they are really fun!\" elif user_dog_count > 0 and", "1 print user_name + \", your birthday is in \", birth_month_count, \"months and\",", "rated movies\" user_dog_count = raw_input(\"How many dogs do you have?: \") user_dog_count =", "# This program asks the user for his/her name and grade. 
#Then, it", "Program # Part I: # This program asks the user for his/her name", "user_dog_count <= 3: print \"Good for you!\" else: print \"Wow that's a lot", "name user_name = raw_input(\"Enter your name: \") # user_grade = raw_input(\"Enter your grade:", "# Then, it prints a sentence that says the number of days and", "user_day = int(user_day) current_month = 7 current_day = 9 if user_month >= current_month:", "school in\", grad_year, \"years!\" # # Part II: # # This program asks", "watch G and PG rated movies\" elif user_age >= 13 and user_age <", "= 9 if user_month >= current_month: birth_month_count = user_month - current_month else: birth_month_count", "user_dog_count = raw_input(\"How many dogs do you have?: \") user_dog_count = int(user_dog_count) if", "(number): \") user_day = raw_input(\"Enter your birth day (number): \") user_month = int(user_month)", "- (user_day - current_day) birth_month_count = birth_month_count - 1 print user_name + \",", "\"years!\" # # Part II: # # This program asks the user for", "= int(user_month) user_day = int(user_day) current_month = 7 current_day = 9 if user_month", "and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \") user_age = int(user_age) if", "grade: \") # grad_year = 12 - int(user_grade) # print user_name + \",", "only watch G rated movies\" elif user_age > 7 and user_age < 13:", "else: print \"you can watch G, PG, PG-13 and R rated movies\" user_dog_count", "high school in\", grad_year, \"years!\" # # Part II: # # This program", "\", you will graduate from high school in\", grad_year, \"years!\" # # Part", "user_month) if user_day >= current_day: birth_day_count = user_day - current_day else: birth_day_count =", "that says the number of days and months until their birthday # user_name_gram", "it prints out a sentence that says the number of years until they", "a lot of dogs!\" # If you complete extensions, describe your extensions here!", "II: # # This program asks the user for his/her name 
and birth", "9 if user_month >= current_month: birth_month_count = user_month - current_month else: birth_month_count =", "says the number of days and months until their birthday # user_name_gram =", "G, PG, PG-13 and R rated movies\" user_dog_count = raw_input(\"How many dogs do", "will graduate from high school in\", grad_year, \"years!\" # # Part II: #", "you have?: \") user_dog_count = int(user_dog_count) if user_dog_count == 0: print \"I suggest", "<= 7: print \"you can only watch G rated movies\" elif user_age >", "int(user_age) if user_age <= 7: print \"you can only watch G rated movies\"", "sentence that says the number of days and months until their birthday #", "you will graduate from high school in\", grad_year, \"years!\" # # Part II:", "program asks the user for his/her name and birth month. # # Then,", "asks the user for his/her name and birth month. # # Then, it", "Part I: # This program asks the user for his/her name and grade.", "the user for his/her name and birth month. # # Then, it prints", "raw_input(\"Enter your age: \") user_age = int(user_age) if user_age <= 7: print \"you", "- user_month) if user_day >= current_day: birth_day_count = user_day - current_day else: birth_day_count", "\"days!\" user_age = raw_input(\"Enter your age: \") user_age = int(user_age) if user_age <=", "name and grade. 
#Then, it prints out a sentence that says the number", "> 0 and user_dog_count <= 3: print \"Good for you!\" else: print \"Wow", "grad_year = 12 - int(user_grade) # print user_name + \", you will graduate", "user_age <= 7: print \"you can only watch G rated movies\" elif user_age", "do you have?: \") user_dog_count = int(user_dog_count) if user_dog_count == 0: print \"I", "user_month >= current_month: birth_month_count = user_month - current_month else: birth_month_count = 12 -", "(current_month - user_month) if user_day >= current_day: birth_day_count = user_day - current_day else:", ">= 13 and user_age < 18: print \"you can watch G, PG and", "user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your birth", "birth_day_count = user_day - current_day else: birth_day_count = 30 - (user_day - current_day)", "movies\" user_dog_count = raw_input(\"How many dogs do you have?: \") user_dog_count = int(user_dog_count)", "the user for his/her name and grade. 
#Then, it prints out a sentence", "dog, they are really fun!\" elif user_dog_count > 0 and user_dog_count <= 3:", "print \"you can only watch G rated movies\" elif user_age > 7 and", "if user_age <= 7: print \"you can only watch G rated movies\" elif", "\"Good for you!\" else: print \"Wow that's a lot of dogs!\" # If", "prints out a sentence that says the number of years until they graduate.", "\", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \") user_age", "= user_month - current_month else: birth_month_count = 12 - (current_month - user_month) if", "your age: \") user_age = int(user_age) if user_age <= 7: print \"you can", "30 - (user_day - current_day) birth_month_count = birth_month_count - 1 print user_name +", "- (current_month - user_month) if user_day >= current_day: birth_day_count = user_day - current_day", "- int(user_grade) # print user_name + \", you will graduate from high school", "month. # # Then, it prints a sentence that says the number of", "their birthday # user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month =", "int(user_day) current_month = 7 current_day = 9 if user_month >= current_month: birth_month_count =", "have?: \") user_dog_count = int(user_dog_count) if user_dog_count == 0: print \"I suggest you", "0: print \"I suggest you get a dog, they are really fun!\" elif", "they are really fun!\" elif user_dog_count > 0 and user_dog_count <= 3: print", "and PG rated movies\" elif user_age >= 13 and user_age < 18: print", "Part II: # # This program asks the user for his/her name and", "and grade. 
#Then, it prints out a sentence that says the number of", "# user_name_gram = user_name[0:1].upper() + user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your", "user_day - current_day else: birth_day_count = 30 - (user_day - current_day) birth_month_count =", "user_month = int(user_month) user_day = int(user_day) current_month = 7 current_day = 9 if", "for you!\" else: print \"Wow that's a lot of dogs!\" # If you", "current_month else: birth_month_count = 12 - (current_month - user_month) if user_day >= current_day:", "number of days and months until their birthday # user_name_gram = user_name[0:1].upper() +", "grad_year, \"years!\" # # Part II: # # This program asks the user", "# Date: # proj01: A Simple Program # Part I: # This program", "<= 3: print \"Good for you!\" else: print \"Wow that's a lot of", "a dog, they are really fun!\" elif user_dog_count > 0 and user_dog_count <=", "rated movies\" elif user_age > 7 and user_age < 13: print \"you can", "# # Then, it prints a sentence that says the number of days", "of years until they graduate. #var name user_name = raw_input(\"Enter your name: \")", "< 13: print \"you can watch G and PG rated movies\" elif user_age", "birth_day_count = 30 - (user_day - current_day) birth_month_count = birth_month_count - 1 print", "\"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \") user_age = int(user_age)", "13 and user_age < 18: print \"you can watch G, PG and PG-13", "his/her name and birth month. 
# # Then, it prints a sentence that", "else: birth_month_count = 12 - (current_month - user_month) if user_day >= current_day: birth_day_count", "user_age < 18: print \"you can watch G, PG and PG-13 rated movies\"", "elif user_age >= 13 and user_age < 18: print \"you can watch G,", "user_name_gram user_month = raw_input(\"Enter your birth month (number): \") user_day = raw_input(\"Enter your", "# # Part II: # # This program asks the user for his/her", "\") user_dog_count = int(user_dog_count) if user_dog_count == 0: print \"I suggest you get", "sentence that says the number of years until they graduate. #var name user_name", "number of years until they graduate. #var name user_name = raw_input(\"Enter your name:", "grade. #Then, it prints out a sentence that says the number of years", "\") # user_grade = raw_input(\"Enter your grade: \") # grad_year = 12 -", "graduate from high school in\", grad_year, \"years!\" # # Part II: # #", "really fun!\" elif user_dog_count > 0 and user_dog_count <= 3: print \"Good for", "user_month = raw_input(\"Enter your birth month (number): \") user_day = raw_input(\"Enter your birth", "and birth month. # # Then, it prints a sentence that says the", "= int(user_day) current_month = 7 current_day = 9 if user_month >= current_month: birth_month_count", "12 - int(user_grade) # print user_name + \", you will graduate from high", "# Part II: # # This program asks the user for his/her name", "+ \", your birthday is in \", birth_month_count, \"months and\", birth_day_count, \"days!\" user_age", "\") user_age = int(user_age) if user_age <= 7: print \"you can only watch", "can watch G, PG, PG-13 and R rated movies\" user_dog_count = raw_input(\"How many", "birth_month_count, \"months and\", birth_day_count, \"days!\" user_age = raw_input(\"Enter your age: \") user_age =", "I: # This program asks the user for his/her name and grade. 
#Then,", "your name: \") # user_grade = raw_input(\"Enter your grade: \") # grad_year =", "raw_input(\"How many dogs do you have?: \") user_dog_count = int(user_dog_count) if user_dog_count ==", "name and birth month. # # Then, it prints a sentence that says", "user_name[1:].lower() # print user_name_gram user_month = raw_input(\"Enter your birth month (number): \") user_day" ]
[ "Pak:\") response = input() #get the file, create a world, return the world", "print(\"Moves: \" + str(moves) + \" | Score: \" + str(score)) print(\"\\n\" +", "str(moves) + \" | Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"] + \"", "return the world response = \"json/\"+response+\".json\" file = open(response, ) world = json.load(file)", "in world: for passage in world[\"passages\"]: if location_label == passage[\"name\"]: return passage return", "(render the world) print(\"Moves: \" + str(moves) + \" | Score: \" +", "matches a link for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else:", "link[\"passageName\"] else: print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world = select_game() location_label", "= {} response = \"\" score = 0 moves = 0 while True:", "---------------------------------------------------------------- def select_game(): #Get all json filenames within the json folder path_to_json_files =", "and \"cleanText\" in current_location: # Display passage (render the world) print(\"Moves: \" +", "links for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" - \"", "link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" - \" + link[\"passageName\"])", "see if response matches a link for link in current_location[\"links\"]: if(response == link[\"linkText\"]):", "not found.\") return location_label # ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location", "= input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label, response): #if there is", "the current_location if \"links\" in current_location: #for each link, see if response matches", "find_current_location(location_label): if \"passages\" in world: for passage in 
world[\"passages\"]: if location_label == passage[\"name\"]:", "# ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in current_location and \"cleanText\" in", "script requires at least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json", "while True: if response in json_files: break print(\"You have the following Game Paks:\")", "open(response, ) world = json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\"", "get_input(): response = input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label, response): #if", "True: if response == \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label =", "location_label # see if there are links in the current_location if \"links\" in", "if \"links\" in current_location: #for each link, see if response matches a link", "# ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for passage in world[\"passages\"]: if", "in json_files] response = \"\" #Allow player to select json file while True:", "the world response = \"json/\"+response+\".json\" file = open(response, ) world = json.load(file) return", "= json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for", "links in the current_location if \"links\" in current_location: #for each link, see if", "<filename>main.py #!/usr/bin/env python3 import sys import json import os assert sys.version_info >= (3,9),", "if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location = 
find_current_location(location_label)", "if there are links in the current_location if \"links\" in current_location: #for each", "0 moves = 0 while True: if response == \"QUIT\": break if \"score\"", "os assert sys.version_info >= (3,9), \"This script requires at least Python 3.9\" #", "+ link[\"linkText\"] + \" - \" + link[\"passageName\"]) def get_input(): response = input(\"Enter", "all passage links for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \"", "passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\"", "\"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location", "3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json filenames within the json folder", "= select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score =", "is no response, return location_label argument if response == \"\": return location_label #", ") world = json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in", "there are links in the current_location if \"links\" in current_location: #for each link,", "== \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response)", "update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves) response = get_input() moves+=1", "option: \") return response.upper().strip() def update(current_location, location_label, response): #if there is no response,", "+ str(moves) + \" | Score: \" + str(score)) print(\"\\n\" + 
current_location[\"name\"] +", "if response in json_files: break print(\"You have the following Game Paks:\") print(json_files) print(\"Select", "the following Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response = input() #get", "argument if response == \"\": return location_label # see if there are links", "= \"\" #Allow player to select json file while True: if response in", "world: for passage in world[\"passages\"]: if location_label == passage[\"name\"]: return passage return {}", "print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"]", "in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in json_files] response", "for json_file in json_files] response = \"\" #Allow player to select json file", "have the following Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response = input()", "are links in the current_location if \"links\" in current_location: #for each link, see", "found.\") return location_label # ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location =", "= open(response, ) world = json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if", "\"\" #Allow player to select json file while True: if response in json_files:", "passage return {} # ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in current_location", "world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score = 0 moves = 0", "os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in json_files] response =", "True: 
if response in json_files: break print(\"You have the following Game Paks:\") print(json_files)", "def select_game(): #Get all json filenames within the json folder path_to_json_files = \"json/\"", "\" + link[\"passageName\"]) def get_input(): response = input(\"Enter option: \") return response.upper().strip() def", "for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in", "location_label = update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves) response =", "to select json file while True: if response in json_files: break print(\"You have", "return response.upper().strip() def update(current_location, location_label, response): #if there is no response, return location_label", "\" - \" + link[\"passageName\"]) def get_input(): response = input(\"Enter option: \") return", "0 while True: if response == \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"]", "print(json_files) print(\"Select a Game Pak:\") response = input() #get the file, create a", "score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves) response", "select_game(): #Get all json filenames within the json folder path_to_json_files = \"json/\" json_files", "- \" + link[\"passageName\"]) def get_input(): response = input(\"Enter option: \") return response.upper().strip()", "current_location if \"links\" in current_location: #for each link, see if response matches a", "#for each link, see if response matches a link for link in current_location[\"links\"]:", "response = \"json/\"+response+\".json\" file = open(response, ) world = json.load(file) return world #", "json_files = [json_file.replace(\".json\", \"\") for json_file in json_files] response = 
\"\" #Allow player", "response = input() #get the file, create a world, return the world response", "in current_location and \"cleanText\" in current_location: # Display passage (render the world) print(\"Moves:", "{} # ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in current_location and \"cleanText\"", "\"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\",", "location_label = world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score = 0 moves", "Print all passage links for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] +", "def get_input(): response = input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label, response):", "in the current_location if \"links\" in current_location: #for each link, see if response", "import os assert sys.version_info >= (3,9), \"This script requires at least Python 3.9\"", "= [json_file.replace(\".json\", \"\") for json_file in json_files] response = \"\" #Allow player to", "score, moves): if \"name\" in current_location and \"cleanText\" in current_location: # Display passage", "\" + current_location[\"cleanText\"]) # Print all passage links for link in current_location[\"links\"]: print(\"", "def render(current_location, score, moves): if \"name\" in current_location and \"cleanText\" in current_location: #", "create a world, return the world response = \"json/\"+response+\".json\" file = open(response, )", "[pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file", "world, return the world response = \"json/\"+response+\".json\" file = open(response, ) world =", "= \"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] 
json_files =", "if response matches a link for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return", "return location_label # ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {}", "return {} # ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in current_location and", "\" - \" + current_location[\"cleanText\"]) # Print all passage links for link in", "->\" + link[\"linkText\"] + \" - \" + link[\"passageName\"]) def get_input(): response =", "response matches a link for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"]", "= [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for", "\"\") for json_file in json_files] response = \"\" #Allow player to select json", "if location_label == passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def render(current_location, score,", "# ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response =", "= find_current_location(location_label) render(current_location, score, moves) response = get_input() moves+=1 print(\"Thank you for playing!\")", "(3,9), \"This script requires at least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get", "response = input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label, response): #if there", "world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for passage in world[\"passages\"]:", "\"This script requires at 
least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all", "[json_file.replace(\".json\", \"\") for json_file in json_files] response = \"\" #Allow player to select", "#if there is no response, return location_label argument if response == \"\": return", "# Print all passage links for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"]", "assert sys.version_info >= (3,9), \"This script requires at least Python 3.9\" # ----------------------------------------------------------------", "passage (render the world) print(\"Moves: \" + str(moves) + \" | Score: \"", "link[\"linkText\"] + \" - \" + link[\"passageName\"]) def get_input(): response = input(\"Enter option:", "\") return response.upper().strip() def update(current_location, location_label, response): #if there is no response, return", "in current_location: #for each link, see if response matches a link for link", "return link[\"passageName\"] else: print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world = select_game()", "world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score", "response == \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label,", "# Display passage (render the world) print(\"Moves: \" + str(moves) + \" |", "for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" - \" +", "json filenames within the json folder path_to_json_files = \"json/\" json_files = [pos_json for", "= input() #get the file, create a world, return the world response =", "---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response = \"\"", "\"\" score = 0 moves = 0 while True: if response == \"QUIT\":", 
"within the json folder path_to_json_files = \"json/\" json_files = [pos_json for pos_json in", "= 0 while True: if response == \"QUIT\": break if \"score\" in current_location:", "while True: if response == \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label", "requires at least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json filenames", "passage links for link in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" -", "there is no response, return location_label argument if response == \"\": return location_label", "if response == \"QUIT\": break if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location,", "return passage return {} # ---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in", "no response, return location_label argument if response == \"\": return location_label # see", "json import os assert sys.version_info >= (3,9), \"This script requires at least Python", "- \" + current_location[\"cleanText\"]) # Print all passage links for link in current_location[\"links\"]:", "location_label, response): #if there is no response, return location_label argument if response ==", "current_location: # Display passage (render the world) print(\"Moves: \" + str(moves) + \"", "response in json_files: break print(\"You have the following Game Paks:\") print(json_files) print(\"Select a", "response, return location_label argument if response == \"\": return location_label # see if", "in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return location_label", "player to select json file while True: if response in json_files: break print(\"You", "link, see if response matches a link for link in current_location[\"links\"]: if(response ==", "the 
world) print(\"Moves: \" + str(moves) + \" | Score: \" + str(score))", "str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) # Print all", "else: print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world = select_game() location_label =", "passage in world[\"passages\"]: if location_label == passage[\"name\"]: return passage return {} # ----------------------------------------------------------------", "location_label # ---------------------------------------------------------------- world = select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response", "\"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location,", "\" + str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) #", "if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return location_label # ----------------------------------------------------------------", "render(current_location, score, moves): if \"name\" in current_location and \"cleanText\" in current_location: # Display", "world response = \"json/\"+response+\".json\" file = open(response, ) world = json.load(file) return world", "response = \"\" score = 0 moves = 0 while True: if response", "moves = 0 while True: if response == \"QUIT\": break if \"score\" in", "#Get all json filenames within the json folder path_to_json_files = \"json/\" json_files =", "print(\"You have the following Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response =", "in current_location: # Display passage (render the world) print(\"Moves: \" + str(moves) +", "each link, see if response matches a link for link in current_location[\"links\"]: if(response", "= 
update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves) response = get_input()", "json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for passage", "current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" - \" + link[\"passageName\"]) def get_input():", "location_label argument if response == \"\": return location_label # see if there are", "if \"name\" in current_location and \"cleanText\" in current_location: # Display passage (render the", "sys import json import os assert sys.version_info >= (3,9), \"This script requires at", "\" + str(moves) + \" | Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"]", "return location_label argument if response == \"\": return location_label # see if there", "in json_files: break print(\"You have the following Game Paks:\") print(json_files) print(\"Select a Game", "if response == \"\": return location_label # see if there are links in", "+ \" | Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"] + \" -", "for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\")", "---------------------------------------------------------------- def render(current_location, score, moves): if \"name\" in current_location and \"cleanText\" in current_location:", "current_location and \"cleanText\" in current_location: # Display passage (render the world) print(\"Moves: \"", "world[\"passages\"]: if location_label == passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def render(current_location,", "select_game() location_label = world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score = 0", "# 
---------------------------------------------------------------- def select_game(): #Get all json filenames within the json folder path_to_json_files", "a Game Pak:\") response = input() #get the file, create a world, return", "== passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def render(current_location, score, moves): if", "Paks:\") print(json_files) print(\"Select a Game Pak:\") response = input() #get the file, create", "+ str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) # Print", "response.upper().strip() def update(current_location, location_label, response): #if there is no response, return location_label argument", "+ link[\"passageName\"]) def get_input(): response = input(\"Enter option: \") return response.upper().strip() def update(current_location,", "response): #if there is no response, return location_label argument if response == \"\":", "python3 import sys import json import os assert sys.version_info >= (3,9), \"This script", "a world, return the world response = \"json/\"+response+\".json\" file = open(response, ) world", "---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for passage in world[\"passages\"]: if location_label", "\"links\" in current_location: #for each link, see if response matches a link for", "= world[\"passages\"][0][\"name\"] current_location = {} response = \"\" score = 0 moves =", "current_location = find_current_location(location_label) render(current_location, score, moves) response = get_input() moves+=1 print(\"Thank you for", "the file, create a world, return the world response = \"json/\"+response+\".json\" file =", "\"name\" in current_location and \"cleanText\" in current_location: # Display passage (render the world)", "response == \"\": return location_label # see if there are links in the", "response = \"\" #Allow 
player to select json file while True: if response", "world = json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world:", "json_file in json_files] response = \"\" #Allow player to select json file while", "Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \" + current_location[\"cleanText\"])", "import json import os assert sys.version_info >= (3,9), \"This script requires at least", "json folder path_to_json_files = \"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if", "in current_location[\"links\"]: print(\" ->\" + link[\"linkText\"] + \" - \" + link[\"passageName\"]) def", "all json filenames within the json folder path_to_json_files = \"json/\" json_files = [pos_json", "print(\" ->\" + link[\"linkText\"] + \" - \" + link[\"passageName\"]) def get_input(): response", "a link for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option", "input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label, response): #if there is no", "location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves) response = get_input() moves+=1 print(\"Thank", "current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return location_label #", "Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response = input() #get the file,", "#!/usr/bin/env python3 import sys import json import os assert sys.version_info >= (3,9), \"This", "input() #get the file, create a world, return the world response = \"json/\"+response+\".json\"", "if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in json_files] response = \"\"", "in world[\"passages\"]: if 
location_label == passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def", "score = 0 moves = 0 while True: if response == \"QUIT\": break", "== \"\": return location_label # see if there are links in the current_location", "print(\"\\n\" + current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) # Print all passage", "Display passage (render the world) print(\"Moves: \" + str(moves) + \" | Score:", "pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in json_files]", "Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json filenames within the json", "for passage in world[\"passages\"]: if location_label == passage[\"name\"]: return passage return {} #", "print(\"Select a Game Pak:\") response = input() #get the file, create a world,", "break print(\"You have the following Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response", "json file while True: if response in json_files: break print(\"You have the following", "if \"passages\" in world: for passage in world[\"passages\"]: if location_label == passage[\"name\"]: return", "= \"json/\"+response+\".json\" file = open(response, ) world = json.load(file) return world # ----------------------------------------------------------------", "\"passages\" in world: for passage in world[\"passages\"]: if location_label == passage[\"name\"]: return passage", "folder path_to_json_files = \"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')]", "def find_current_location(location_label): if \"passages\" in world: for passage in world[\"passages\"]: if location_label ==", "current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) # Print all passage links for", "file, create a world, return the world response = 
\"json/\"+response+\".json\" file = open(response,", "update(current_location, location_label, response): #if there is no response, return location_label argument if response", "link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world =", "path_to_json_files = \"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files", "json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\")", "\"cleanText\" in current_location: # Display passage (render the world) print(\"Moves: \" + str(moves)", "link[\"passageName\"]) def get_input(): response = input(\"Enter option: \") return response.upper().strip() def update(current_location, location_label,", "the json folder path_to_json_files = \"json/\" json_files = [pos_json for pos_json in os.listdir(path_to_json_files)", "#Allow player to select json file while True: if response in json_files: break", "json_files: break print(\"You have the following Game Paks:\") print(json_files) print(\"Select a Game Pak:\")", "current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score, moves)", "\" | Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \"", "in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location = find_current_location(location_label) render(current_location, score,", "response) current_location = find_current_location(location_label) render(current_location, score, moves) response = get_input() moves+=1 print(\"Thank you", ">= (3,9), \"This script requires at least Python 3.9\" # 
---------------------------------------------------------------- def select_game():", "# see if there are links in the current_location if \"links\" in current_location:", "+ \" - \" + link[\"passageName\"]) def get_input(): response = input(\"Enter option: \")", "\"\": return location_label # see if there are links in the current_location if", "pos_json.endswith('.json')] json_files = [json_file.replace(\".json\", \"\") for json_file in json_files] response = \"\" #Allow", "import sys import json import os assert sys.version_info >= (3,9), \"This script requires", "+ current_location[\"name\"] + \" - \" + current_location[\"cleanText\"]) # Print all passage links", "least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json filenames within the", "location_label == passage[\"name\"]: return passage return {} # ---------------------------------------------------------------- def render(current_location, score, moves):", "world) print(\"Moves: \" + str(moves) + \" | Score: \" + str(score)) print(\"\\n\"", "current_location = {} response = \"\" score = 0 moves = 0 while", "current_location[\"cleanText\"]) # Print all passage links for link in current_location[\"links\"]: print(\" ->\" +", "break if \"score\" in current_location: score+=current_location[\"score\"] location_label = update(current_location, location_label, response) current_location =", "following Game Paks:\") print(json_files) print(\"Select a Game Pak:\") response = input() #get the", "Game Pak:\") response = input() #get the file, create a world, return the", "file while True: if response in json_files: break print(\"You have the following Game", "| Score: \" + str(score)) print(\"\\n\" + current_location[\"name\"] + \" - \" +", "def update(current_location, location_label, response): #if there is no response, return location_label argument if", "moves): if \"name\" in current_location and \"cleanText\" in current_location: # Display 
passage (render", "return location_label # see if there are links in the current_location if \"links\"", "json_files] response = \"\" #Allow player to select json file while True: if", "sys.version_info >= (3,9), \"This script requires at least Python 3.9\" # ---------------------------------------------------------------- def", "filenames within the json folder path_to_json_files = \"json/\" json_files = [pos_json for pos_json", "+ current_location[\"cleanText\"]) # Print all passage links for link in current_location[\"links\"]: print(\" ->\"", "current_location: #for each link, see if response matches a link for link in", "+ \" - \" + current_location[\"cleanText\"]) # Print all passage links for link", "= 0 moves = 0 while True: if response == \"QUIT\": break if", "\"json/\"+response+\".json\" file = open(response, ) world = json.load(file) return world # ---------------------------------------------------------------- def", "= \"\" score = 0 moves = 0 while True: if response ==", "at least Python 3.9\" # ---------------------------------------------------------------- def select_game(): #Get all json filenames within", "file = open(response, ) world = json.load(file) return world # ---------------------------------------------------------------- def find_current_location(location_label):", "return world # ---------------------------------------------------------------- def find_current_location(location_label): if \"passages\" in world: for passage in", "link for link in current_location[\"links\"]: if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not", "== link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return location_label # ---------------------------------------------------------------- world", "{} response = \"\" score = 0 moves = 0 while True: if", "select json file while True: if response in json_files: break print(\"You have the", "link in current_location[\"links\"]: 
if(response == link[\"linkText\"]): return link[\"passageName\"] else: print(\"Option not found.\") return", "see if there are links in the current_location if \"links\" in current_location: #for", "#get the file, create a world, return the world response = \"json/\"+response+\".json\" file" ]
[ "args.split(',') start = int(args[0]) length = int(args[1]) #print 'generate', start, length return str(start)", "generate( args ): args = args.split(',') start = int(args[0]) length = int(args[1]) #print", "length return str(start) + '-' + str( start + length - 1 )", "args ): args = args.split(',') start = int(args[0]) length = int(args[1]) #print 'generate',", "'generate', start, length return str(start) + '-' + str( start + length -", "start, length return str(start) + '-' + str( start + length - 1", "int(args[1]) #print 'generate', start, length return str(start) + '-' + str( start +", "#print 'generate', start, length return str(start) + '-' + str( start + length", "): args = args.split(',') start = int(args[0]) length = int(args[1]) #print 'generate', start,", "length = int(args[1]) #print 'generate', start, length return str(start) + '-' + str(", "= args.split(',') start = int(args[0]) length = int(args[1]) #print 'generate', start, length return", "def generate( args ): args = args.split(',') start = int(args[0]) length = int(args[1])", "start = int(args[0]) length = int(args[1]) #print 'generate', start, length return str(start) +", "= int(args[1]) #print 'generate', start, length return str(start) + '-' + str( start", "int(args[0]) length = int(args[1]) #print 'generate', start, length return str(start) + '-' +", "= int(args[0]) length = int(args[1]) #print 'generate', start, length return str(start) + '-'", "args = args.split(',') start = int(args[0]) length = int(args[1]) #print 'generate', start, length" ]
[ "j >>= 1 # counter += 1 j = i counter = 0", "import math for i in range(1, 25): r = math.floor(math.log2(i)) #j = i", "math.floor(math.log2(i)) #j = i #counter = 0 #while j != 1: # j", "r = math.floor(math.log2(i)) #j = i #counter = 0 #while j != 1:", "#counter = 0 #while j != 1: # j >>= 1 # counter", "# j >>= 1 # counter += 1 j = i counter =", ">>= 1 # counter += 1 j = i counter = 0 while", "!= 1: # j >>= 1 # counter += 1 j = i", "math for i in range(1, 25): r = math.floor(math.log2(i)) #j = i #counter", "= i counter = 0 while j >>= 1: j >>= 1 counter", "0 while j >>= 1: j >>= 1 counter += 1 print(f\"{i:2} {bin(i)[2:]:>5}", "j != 1: # j >>= 1 # counter += 1 j =", "+= 1 j = i counter = 0 while j >>= 1: j", "i #counter = 0 #while j != 1: # j >>= 1 #", "= 0 while j >>= 1: j >>= 1 counter += 1 print(f\"{i:2}", "j >>= 1: j >>= 1 counter += 1 print(f\"{i:2} {bin(i)[2:]:>5} {r} {counter}\")", "i in range(1, 25): r = math.floor(math.log2(i)) #j = i #counter = 0", "1 j = i counter = 0 while j >>= 1: j >>=", "counter = 0 while j >>= 1: j >>= 1 counter += 1", "= math.floor(math.log2(i)) #j = i #counter = 0 #while j != 1: #", "= i #counter = 0 #while j != 1: # j >>= 1", "range(1, 25): r = math.floor(math.log2(i)) #j = i #counter = 0 #while j", "#j = i #counter = 0 #while j != 1: # j >>=", "0 #while j != 1: # j >>= 1 # counter += 1", "for i in range(1, 25): r = math.floor(math.log2(i)) #j = i #counter =", "in range(1, 25): r = math.floor(math.log2(i)) #j = i #counter = 0 #while", "1: # j >>= 1 # counter += 1 j = i counter", "counter += 1 j = i counter = 0 while j >>= 1:", "j = i counter = 0 while j >>= 1: j >>= 1", "i counter = 0 while j >>= 1: j >>= 1 counter +=", "while j >>= 1: j >>= 1 counter += 1 print(f\"{i:2} {bin(i)[2:]:>5} {r}", "= 0 #while j != 1: # j >>= 1 # counter +=", "# counter += 1 j = i counter = 0 while j >>=", "25): r = math.floor(math.log2(i)) #j = i #counter = 0 #while j !=", "#while j != 1: # j >>= 1 # counter += 1 j", "1 # counter += 1 j = i counter = 
0 while j" ]
[ "'Intended Audience :: Developers', 'Framework :: Django', 'Environment :: Web Environment', 'Programming Language", "distutils.core import setup setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility", "include_package_data=True, description='Simple import / export utility for Django with large data support. Compatible", "- Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django', 'Environment :: Web Environment',", "version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility for Django with large", ":: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django', 'Environment ::", "Developers', 'Framework :: Django', 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming", "setup setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility for Django", "<filename>setup.py from distutils.core import setup setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import /", "install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 -", "Django with large data support. 
Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'],", "'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language ::", "import setup setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility for", "Python :: 3.4', 'License :: OSI Approved :: MIT License', 'Topic :: Software", "'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries", "description='Simple import / export utility for Django with large data support. Compatible with", ":: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 3',", "data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience", "Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules',", "Language :: Python :: 3.4', 'License :: OSI Approved :: MIT License', 'Topic", "export utility for Django with large data support. Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>',", "'Programming Language :: Python :: 3.4', 'License :: OSI Approved :: MIT License',", "large data support. Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE',", ":: Python :: 3.4', 'License :: OSI Approved :: MIT License', 'Topic ::", "for Django with large data support. 
Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7',", "'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'License", "author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status ::", ":: 3.4', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development", "Django', 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python", "with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'],", "license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility for Django with large data", "2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django', 'Environment :: Web", "'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha',", "from distutils.core import setup setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export", "support. 
Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'],", "Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming", "with large data support. Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[],", "/ export utility for Django with large data support. Compatible with Mongoengine.', author='<NAME>',", "Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django', 'Environment :: Web Environment', 'Programming", "OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python", "'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience ::", "import / export utility for Django with large data support. 
Compatible with Mongoengine.',", "py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework", "Audience :: Developers', 'Framework :: Django', 'Environment :: Web Environment', 'Programming Language ::", "classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django',", "Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4',", "url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2", "Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language", ":: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', ]", ":: Developers', 'Framework :: Django', 'Environment :: Web Environment', 'Programming Language :: Python',", "Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python", ":: 3', 'Programming Language :: Python :: 3.4', 'License :: OSI Approved ::", "'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework ::", ":: Django', 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language ::", "requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended", "provides=['simple_export'], 
py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers',", "Python :: 3', 'Programming Language :: Python :: 3.4', 'License :: OSI Approved", "utility for Django with large data support. Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/',", "data support. Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'],", ":: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python ::", "3', 'Programming Language :: Python :: 3.4', 'License :: OSI Approved :: MIT", "Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development", "3.4', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development ::", "MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', ] )", ":: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries ::", ":: Python :: 3', 'Programming Language :: Python :: 3.4', 'License :: OSI", "setup(name='django-simple-export', version='0.1', license='BSD', packages=['simple_export'], include_package_data=True, description='Simple import / export utility for Django with", "Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Framework :: Django', 'Environment", "'Framework :: Django', 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language", 
"'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python ::", "Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'License ::", "packages=['simple_export'], include_package_data=True, description='Simple import / export utility for Django with large data support.", "Compatible with Mongoengine.', author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export',", "author='<NAME>', author_email='<EMAIL>', url='https://github.com/mtskelton/django-simple-export/', install_requires=['django>=1.7', 'metamagic.json'], requires=[], data_files=['LICENSE', 'README.md'], provides=['simple_export'], py_modules=['simple_export.management.commands.simple_export', 'simple_export.management.commands.simple_import'], classifiers=['Development Status" ]
[ "color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:] = color return img def", "if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width,", "cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6,", "sx = x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath)", "return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor =", "y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300)", "= cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x", "None if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1)", "cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo', frame) cv2.setMouseCallback('demo', mouseCallback) cv2.waitKey(0) cv2.destroyAllWindows()", "frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx !=", "cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame =", "import numpy as np import argparse imagePath = \"img.png\" sx = sy =", 
"coding: utf-8 -*- import cv2 import sys import numpy as np import argparse", "cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo', frame) cv2.setMouseCallback('demo', mouseCallback) cv2.waitKey(0)", "< 3: print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath", "imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8)", "\"img.png\" sx = sy = None previewImage = None if len(sys.argv) < 3:", "sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3),", "3), np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event", "import argparse imagePath = \"img.png\" sx = sy = None previewImage = None", "cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo', frame) cv2.setMouseCallback('demo', mouseCallback)", "= None previewImage = None if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive", "None previewImage = None if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i", "if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo', frame)", "np import argparse imagePath = \"img.png\" sx = sy = None previewImage =", "= None if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\")", "sx = sy = None previewImage = None if len(sys.argv) < 3: print(\"\"\"", "createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:] = color return", "= np.zeros((height, width, 3), np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param): global", "3: print(\"\"\" Usage: python 
mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath =", "cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\",", "sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height,", "(0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo',", "def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage", "cv2 import sys import numpy as np import argparse imagePath = \"img.png\" sx", "utf-8 -*- import cv2 import sys import numpy as np import argparse imagePath", "= cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo', frame) cv2.setMouseCallback('demo',", "import sys import numpy as np import argparse imagePath = \"img.png\" sx =", "len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\":", "= createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3)", "sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor", "hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx =", "= sy = None 
previewImage = None if len(sys.argv) < 3: print(\"\"\" Usage:", "mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height,", "imagePath = \"img.png\" sx = sy = None previewImage = None if len(sys.argv)", "previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None):", "(sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo', frame) cv2.imshow('preview',", "x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\")", "as np import argparse imagePath = \"img.png\" sx = sy = None previewImage", "img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img", "img = np.zeros((height, width, 3), np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param):", "frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo', frame)", "cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80)", "previewImage = None if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i img.png", "bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if", "print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = 
sys.argv[2]", "-*- coding: utf-8 -*- import cv2 import sys import numpy as np import", "if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor =", "cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y", "img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x]", "height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:] = color return img", "createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx", "\"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img =", "= color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param)", "def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:] = color", "= y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500,", "!= None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage)", "== cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0]))", "width, 3), np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param): global 
sx,sy,previewImage if", "= sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:]", "mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage =", "sys.argv[2] def createBlankImage(width, height, color=(255,255,255)): img = np.zeros((height, width, 3), np.uint8) img[:] =", "None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame", "sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\",", "(event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV)", "-i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width, height, color=(255,255,255)):", "global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor)", "if len(sys.argv) < 3: print(\"\"\" Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1) if", "# -*- coding: utf-8 -*- import cv2 import sys import numpy as np", "np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event ==", "color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN): print(event,x,y,flags,param) bgrColor", "print(event,x,y,flags,param) bgrColor = frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, 
(0,0,255),-1)", "python mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def createBlankImage(width,", "Usage: python mouseInteractive -i img.png \"\"\") sys.exit(-1) if sys.argv[1]==\"-i\": imagePath = sys.argv[2] def", "frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500,", "import cv2 import sys import numpy as np import argparse imagePath = \"img.png\"", "-*- import cv2 import sys import numpy as np import argparse imagePath =", "= \"img.png\" sx = sy = None previewImage = None if len(sys.argv) <", "= frame[y][x] previewImage = createBlankImage(200,200,bgrColor) hsvColor = cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx", "print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy =", "sy = None previewImage = None if len(sys.argv) < 3: print(\"\"\" Usage: python", "sys import numpy as np import argparse imagePath = \"img.png\" sx = sy", "img[:] = color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage if (event == cv2.EVENT_LBUTTONDOWN):", "np.zeros((height, width, 3), np.uint8) img[:] = color return img def mouseCallback(event,x,y,flags,param): global sx,sy,previewImage", "= x sy = y cv2.imshow('demo', frame) cv2.imshow('preview', previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\")", "cv2.cvtColor(bgrColor.reshape(1,1,3),cv2.COLOR_BGR2HSV) print(\"bgr->hsv:{}->{}\".format(bgrColor,hsvColor.tolist()[0][0])) cv2.circle(frame,(x,y),6, (0,0,255),-1) if (sx != None): cv2.line(frame,(sx,sy),(x,y),(0,0,255),3) sx = x sy", "argparse imagePath = \"img.png\" sx = sy = None previewImage = None if", 
"previewImage) frame = cv2.imread(imagePath) cv2.namedWindow(\"demo\") cv2.namedWindow(\"preview\") cv2.moveWindow(\"demo\", 1500, 300) cv2.moveWindow(\"preview\", 1500, 80) cv2.imshow('demo',", "numpy as np import argparse imagePath = \"img.png\" sx = sy = None" ]
[ "position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([", "in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape = (64, 64)", "2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def", "= noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def", "skimage import matplotlib.pyplot as plt import numpy as np from imgaug import augmenters", "return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show = demo_board(images) if save_as", "augmented_shape = (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'),", "matplotlib inline import skimage import matplotlib.pyplot as plt import numpy as np from", "batch(size): originals = images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in originals]) noised", "= (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0],", "images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation", "iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height':", "#%% utils def batch(size): originals = images[np.random.choice(len(images), size)] augmented = 
np.array([augmenter.augment_image(image) for image", "augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8,", "noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show", "0, 255) def show(images, save_as=None): to_show = demo_board(images) if save_as is not None:", "= images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented)", "imgaug import augmenters as iaa from imageio import mimsave #%% loading def to_rgb(image):", "iaa from imageio import mimsave #%% loading def to_rgb(image): if len(np.shape(image)) == 2:", "5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]})", "import mimsave #%% loading def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return", "#%% augmentation pipeline augmented_shape = (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0),", "= demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show", "#%% loading def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :,", "augmented = np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented) return noised, augmented", "== 2: return skimage.color.gray2rgb(image) return image[:, :, :3] images = [] for image", "save_as=None): to_show = demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show()", "import 
augmenters as iaa from imageio import mimsave #%% loading def to_rgb(image): if", "np from imgaug import augmenters as iaa from imageio import mimsave #%% loading", "skimage.color.gray2rgb(image) return image[:, :, :3] images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image))", "augmenters as iaa from imageio import mimsave #%% loading def to_rgb(image): if len(np.shape(image))", "iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser", "plt import numpy as np from imgaug import augmenters as iaa from imageio", "originals]) noised = noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0,", "[iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def batch(size):", "image in originals]) noised = noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images,", "utils def batch(size): originals = images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in", "def show(images, save_as=None): to_show = demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show)", "= iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True )", "[] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape", "from imgaug import augmenters as iaa from imageio import mimsave #%% loading def", "'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 
'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0,", "(64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1],", "iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%%", "def batch(size): originals = images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in originals])", "skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path, demo_boards, fps=25): mimsave(path, demo_boards, fps=fps)", "iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ])", "noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images,", "if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path,", "iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf(", "as np from imgaug import augmenters as iaa from imageio import mimsave #%%", "random_order=True ) ]) #%% utils def batch(size): originals = images[np.random.choice(len(images), size)] augmented =", "augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show =", "iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), 
iaa.Resize(size={'height': augmented_shape[0], 'width':", "size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def batch(size): originals =", "import skimage import matplotlib.pyplot as plt import numpy as np from imgaug import", "images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented) return", "return skimage.color.gray2rgb(image) return image[:, :, :3] images = [] for image in skimage.io.imread_collection('statues/*'):", ":3] images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%%", "(1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils", "as plt import numpy as np from imgaug import augmenters as iaa from", "16))], random_order=True ) ]) #%% utils def batch(size): originals = images[np.random.choice(len(images), size)] augmented", "mimsave #%% loading def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:,", "as iaa from imageio import mimsave #%% loading def to_rgb(image): if len(np.shape(image)) ==", "iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def batch(size): originals = images[np.random.choice(len(images), size)]", "augmentation pipeline augmented_shape = (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5,", ":, :3] images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images)", "skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape = (64, 64) augmenter", "from imageio import mimsave #%% loading def to_rgb(image): if len(np.shape(image)) == 2: return", "to_show = 
demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return", "augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1,", "matplotlib.pyplot as plt import numpy as np from imgaug import augmenters as iaa", "numpy as np from imgaug import augmenters as iaa from imageio import mimsave", "not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path, demo_boards, fps=25): mimsave(path,", "for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape =", "loading def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :, :3]", "np.array(images) #%% augmentation pipeline augmented_shape = (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5,", "1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio',", "2: return skimage.color.gray2rgb(image) return image[:, :, :3] images = [] for image in", "#%% imports #% matplotlib inline import skimage import matplotlib.pyplot as plt import numpy", "import numpy as np from imgaug import augmenters as iaa from imageio import", "height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser =", "rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), 
iaa.Resize(size={'height': 'keep-aspect-ratio', 'width':", "'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)),", "return image[:, :, :3] images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images", "255) def show(images, save_as=None): to_show = demo_board(images) if save_as is not None: skimage.io.imsave(save_as,", "None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path, demo_boards, fps=25): mimsave(path, demo_boards,", "return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None):", "64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'),", "1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show = demo_board(images) if save_as is not", "if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :, :3] images = []", "def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show = demo_board(images)", "image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape = (64,", "originals = images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for image in originals]) noised =", "pipeline augmented_shape = (64, 64) augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5),", "noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True", "]) #%% utils def batch(size): originals = 
images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image) for", "= iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0],", "show(images, save_as=None): to_show = demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show)", "np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented) return noised, augmented def demo_board(images):", "is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path, demo_boards, fps=25):", "iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}),", "np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show = demo_board(images) if save_as is", "augmenter = iaa.Sequential([ iaa.Fliplr(p=0.5), iaa.Affine(scale=(0.5, 1.0), rotate=(-5, 5), mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height':", "demo_board(images) if save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def", "= np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented) return noised, augmented def", "0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def batch(size): originals", "save_as is not None: skimage.io.imsave(save_as, to_show) plt.imshow(to_show) plt.show() return to_show def save_gif(path, demo_boards,", "for image in originals]) noised = 
noiser.augment_images(augmented) return noised, augmented def demo_board(images): return", "images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline augmented_shape = (64, 64) augmenter =", "def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :, :3] images", "0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))], random_order=True ) ]) #%% utils def batch(size): originals = images[np.random.choice(len(images),", "'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2),", "inline import skimage import matplotlib.pyplot as plt import numpy as np from imgaug", "len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :, :3] images = [] for", "noised = noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255)", "images = np.array(images) #%% augmentation pipeline augmented_shape = (64, 64) augmenter = iaa.Sequential([", "demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8), 0, 255) def show(images, save_as=None): to_show = demo_board(images) if", "<filename>images.py #%% imports #% matplotlib inline import skimage import matplotlib.pyplot as plt import", "imports #% matplotlib inline import skimage import matplotlib.pyplot as plt import numpy as", "= [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images = np.array(images) #%% augmentation pipeline", "image[:, :, :3] images = [] for image in skimage.io.imread_collection('statues/*'): images.append(to_rgb(image)) images =", "]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01, 0.05)), iaa.AdditiveGaussianNoise(scale=(8, 16))],", "= np.array(images) #%% augmentation pipeline augmented_shape = (64, 64) augmenter = 
iaa.Sequential([ iaa.Fliplr(p=0.5),", "to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image) return image[:, :, :3] images =", "#% matplotlib inline import skimage import matplotlib.pyplot as plt import numpy as np", "imageio import mimsave #%% loading def to_rgb(image): if len(np.shape(image)) == 2: return skimage.color.gray2rgb(image)", "'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2), size_percent=(0.01,", "size)] augmented = np.array([augmenter.augment_image(image) for image in originals]) noised = noiser.augment_images(augmented) return noised,", "in originals]) noised = noiser.augment_images(augmented) return noised, augmented def demo_board(images): return np.clip(np.concatenate(images, 1).astype(np.uint8),", "import matplotlib.pyplot as plt import numpy as np from imgaug import augmenters as", "iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ]) noiser = iaa.Sequential([ iaa.SomeOf( (1, 2), [iaa.CoarseDropout(p=(0.0, 0.2),", "mode='reflect'), iaa.CropToFixedSize(width=augmented_shape[0], height=augmented_shape[1], position='normal'), iaa.Resize(size={'height': augmented_shape[0], 'width': 'keep-aspect-ratio'}), iaa.Resize(size={'height': 'keep-aspect-ratio', 'width': augmented_shape[1]}) ])", ") ]) #%% utils def batch(size): originals = images[np.random.choice(len(images), size)] augmented = np.array([augmenter.augment_image(image)" ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text'", "namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "self.y, self.ts) def __repr__(self): return str(self) def __eq__(self, o): return self.__dict__ == o.__dict__", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self,", "License. # You may obtain a copy of the License at # #", "2019, Neptune Labs Sp. z o.o. # # Licensed under the Apache License,", "NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM =", "License. 
# import time from collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace", "@property def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self):", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE =", "compliance with the License. # You may obtain a copy of the License", "namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace']", "under the License. # import time from collections import namedtuple from enum import", "Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues',", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "ts = time.time() self._ts = ts @property def ts(self): return self._ts @property def", "this file except in compliance with the License. # You may obtain a", "'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE = 'image' class", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class ChannelValue(object):", "return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def __eq__(self, o): return self.__dict__", "Copyright (c) 2019, Neptune Labs Sp. z o.o. # # Licensed under the", "you may not use this file except in compliance with the License. #", "the License. 
# import time from collections import namedtuple from enum import Enum", "'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC", "self._ts = ts @property def ts(self): return self._ts @property def x(self): return self._x", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name',", "self._ts @property def x(self): return self._x @property def y(self): return self._y def __str__(self):", "return str(self) def __eq__(self, o): return self.__dict__ == o.__dict__ def __ne__(self, o): return", "ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self, x, y,", "ANY KIND, either express or implied. # See the License for the specific", "is None: ts = time.time() self._ts = ts @property def ts(self): return self._ts", "import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues =", "'text' NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM", "= y if ts is None: ts = time.time() self._ts = ts @property", "@property def ts(self): return self._ts @property def x(self): return self._x @property def y(self):", "ts): self._x = x self._y = y if ts is None: ts =", "enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues", "in compliance with the License. 
# You may obtain a copy of the", "import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type',", "'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def __eq__(self, o): return self.__dict__ ==", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "__str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def __eq__(self, o): return", "ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric'", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "from collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id',", "use this file except in compliance with the License. # You may obtain", "permissions and # limitations under the License. # import time from collections import", "= namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "self.ts) def __repr__(self): return str(self) def __eq__(self, o): return self.__dict__ == o.__dict__ def", "not use this file except in compliance with the License. # You may", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "self._x = x self._y = y if ts is None: ts = time.time()", "SYSTEM = 'system' class ChannelValue(object): def __init__(self, x, y, ts): self._x = x", "from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] )", "See the License for the specific language governing permissions and # limitations under", "def __repr__(self): return str(self) def __eq__(self, o): return self.__dict__ == o.__dict__ def __ne__(self,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "TEXT = 'text' NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "# import time from collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace =", "__init__(self, x, y, ts): self._x = x self._y = y if ts is", "OF ANY KIND, either express or implied. # See the License for the", "['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT", "2.0 (the \"License\"); # you may not use this file except in compliance", "Labs Sp. z o.o. 
# # Licensed under the Apache License, Version 2.0", "# you may not use this file except in compliance with the License.", "governing permissions and # limitations under the License. # import time from collections", "for the specific language governing permissions and # limitations under the License. #", "def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def __eq__(self, o):", "agreed to in writing, software # distributed under the License is distributed on", "the specific language governing permissions and # limitations under the License. # import", "class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum):", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "USER = 'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self, x, y, ts):", "o.o. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "def __init__(self, x, y, ts): self._x = x self._y = y if ts", "self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def __eq__(self,", "= 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class ChannelValue(object): def", "(the \"License\"); # you may not use this file except in compliance with", "x(self): return self._x @property def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y,", "# Copyright (c) 2019, Neptune Labs Sp. z o.o. # # Licensed under", "# # Unless required by applicable law or agreed to in writing, software", "self._x @property def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def", "= 'text' NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user'", "express or implied. 
# See the License for the specific language governing permissions", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "except in compliance with the License. # You may obtain a copy of", "= ts @property def ts(self): return self._ts @property def x(self): return self._x @property", "def __eq__(self, o): return self.__dict__ == o.__dict__ def __ne__(self, o): return not self.__eq__(o)", "by applicable law or agreed to in writing, software # distributed under the", "import time from collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple(", "return self._x @property def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts)", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "z o.o. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "either express or implied. # See the License for the specific language governing", "time.time() self._ts = ts @property def ts(self): return self._ts @property def x(self): return", ") ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC =", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "specific language governing permissions and # limitations under the License. # import time", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "y if ts is None: ts = time.time() self._ts = ts @property def", "def ts(self): return self._ts @property def x(self): return self._x @property def y(self): return", "return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return str(self) def", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system'", "ts(self): return self._ts @property def x(self): return self._x @property def y(self): return self._y", "# limitations under the License. # import time from collections import namedtuple from", "ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id',", "['channel_id', 'channel_values']) class ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE = 'image'", "file except in compliance with the License. # You may obtain a copy", "@property def x(self): return self._x @property def y(self): return self._y def __str__(self): return", "str(self) def __eq__(self, o): return self.__dict__ == o.__dict__ def __ne__(self, o): return not", "def x(self): return self._x @property def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x,", "'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class", "(c) 2019, Neptune Labs Sp. z o.o. 
# # Licensed under the Apache", "class ChannelNamespace(Enum): USER = 'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self, x,", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "Neptune Labs Sp. z o.o. # # Licensed under the Apache License, Version", "License for the specific language governing permissions and # limitations under the License.", "__repr__(self): return str(self) def __eq__(self, o): return self.__dict__ == o.__dict__ def __ne__(self, o):", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "y, ts): self._x = x self._y = y if ts is None: ts", "the License. # You may obtain a copy of the License at #", "to in writing, software # distributed under the License is distributed on an", "\"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum):", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "ts @property def ts(self): return self._ts @property def x(self): return self._x @property def", "class ChannelValue(object): def __init__(self, x, y, ts): self._x = x self._y = y", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "= 'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self, x, y, ts): self._x", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "required by applicable law or agreed to in writing, software # distributed under", "language governing permissions and # limitations under the License. 
# import time from", "ChannelValue(object): def __init__(self, x, y, ts): self._x = x self._y = y if", "applicable law or agreed to in writing, software # distributed under the License", "x self._y = y if ts is None: ts = time.time() self._ts =", "'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values']) class ChannelType(Enum): TEXT =", "'user' SYSTEM = 'system' class ChannelValue(object): def __init__(self, x, y, ts): self._x =", "self._y = y if ts is None: ts = time.time() self._ts = ts", "ChannelType(Enum): TEXT = 'text' NUMERIC = 'numeric' IMAGE = 'image' class ChannelNamespace(Enum): USER", "def y(self): return self._y def __str__(self): return 'ChannelValue(x={},y={},ts={})'.format(self.x, self.y, self.ts) def __repr__(self): return", "return self._ts @property def x(self): return self._x @property def y(self): return self._y def", "and # limitations under the License. # import time from collections import namedtuple", "or agreed to in writing, software # distributed under the License is distributed", "or implied. # See the License for the specific language governing permissions and", "# # Copyright (c) 2019, Neptune Labs Sp. z o.o. # # Licensed", "Sp. z o.o. # # Licensed under the Apache License, Version 2.0 (the", "time from collections import namedtuple from enum import Enum ChannelNameWithTypeAndNamespace = namedtuple( \"ChannelNameWithType\",", "x, y, ts): self._x = x self._y = y if ts is None:", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "= namedtuple( \"ChannelNameWithType\", ['channel_id', 'channel_name', 'channel_type', 'channel_namespace'] ) ChannelIdWithValues = namedtuple('ChannelIdWithValues', ['channel_id', 'channel_values'])", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "'system' class ChannelValue(object): def __init__(self, x, y, ts): self._x = x self._y =", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "if ts is None: ts = time.time() self._ts = ts @property def ts(self):", "= 'system' class ChannelValue(object): def __init__(self, x, y, ts): self._x = x self._y", "ts is None: ts = time.time() self._ts = ts @property def ts(self): return", "= x self._y = y if ts is None: ts = time.time() self._ts", "with the License. # You may obtain a copy of the License at", "limitations under the License. # import time from collections import namedtuple from enum", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "= time.time() self._ts = ts @property def ts(self): return self._ts @property def x(self):", "None: ts = time.time() self._ts = ts @property def ts(self): return self._ts @property", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
[ "plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions", "\"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__ == '__main__': parser", "\"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\":", "d20 min(d6, d6, d6) maxn(2, d6, d6, d6) (select top 2 of 3", "# dice_roll_parser.py # # Copyright 2021, <NAME> # from plusminus import BaseArithmeticParser #", "as used in many board and role-playing games, such as: d20 3d20 5d6", "min(d6, d6, d6) maxn(2, d6, d6, d6) (select top 2 of 3 d6", "evaluating expressions representing rolls of dice, as used in many board and role-playing", "role-playing games, such as: d20 3d20 5d6 + d20 min(d6, d6, d6) maxn(2,", "top 2 of 3 d6 rolls) show(d6, d6, d6) \"\"\" def customize(self): import", "..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)})", "..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values):", "parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6,", "{\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\":", "random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a,", "BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls", "\"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if", "maxn(2, d6, d6, d6) (select top 2 of 3 d6 rolls) show(d6, d6,", "for _ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., 
lambda", "BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b)", "\"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__ ==", "rolls) show(d6, d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda", "d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1,", "class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls of dice, as used", "2021, <NAME> # from plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\"", "used in many board and role-playing games, such as: d20 3d20 5d6 +", "import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing", "*values): ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\":", "off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls of dice, as", "d6, d6, d6) (select top 2 of 3 d6 rolls) show(d6, d6, d6)", "1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1,", "fmt: on if __name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6", "== '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6,", "3d20 5d6 + d20 min(d6, d6, d6) maxn(2, d6, d6, d6) (select top", "self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\",", "..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values,", "sum(random.randint(1, b) for _ 
in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\",", "return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) #", "import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda", "ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)}", "on if __name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4", "3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6, d6, d6) \"\"\", postParse=lambda _, result:", "__name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6,", "from plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating", "<NAME> # from plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser", "sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__ == '__main__': parser =", "parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6, d6, d6) \"\"\",", "5d6 + d20 min(d6, d6, d6) maxn(2, d6, d6, d6) (select top 2", "def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\":", "self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n,", "b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max)", "def customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2,", "2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\", ...,", "d6) \"\"\" def 
customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a))", "lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for", "self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\":", "a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _ in range(a)))", "such as: d20 3d20 5d6 + d20 min(d6, d6, d6) maxn(2, d6, d6,", "..., maxn) # fmt: on if __name__ == '__main__': parser = DiceRollParser() parser.runTests(", "if __name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100", "maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret,", "max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret", "d20+3d4 2d100 max(d6, d6, d6) show(d6, d6, d6) \"\"\", postParse=lambda _, result: result[0].evaluate(),", "{\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt:", "2 of 3 d6 rolls) show(d6, d6, d6) \"\"\" def customize(self): import random", "a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _", "lambda a, b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\",", "n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on", "reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn)", "d6) maxn(2, d6, d6, d6) (select top 2 of 3 d6 rolls) show(d6,", "d6) (select top 2 of 3 d6 rolls) show(d6, d6, d6) \"\"\" def", "self.add_operator(\"d\", 1, 
BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b:", "*args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return", "representing rolls of dice, as used in many board and role-playing games, such", "DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls of dice, as used in", "= DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6, d6,", "2d100 max(d6, d6, d6) show(d6, d6, d6) \"\"\", postParse=lambda _, result: result[0].evaluate(), )", "ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__ == '__main__':", "in many board and role-playing games, such as: d20 3d20 5d6 + d20", "self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret =", "maxn) # fmt: on if __name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\", "d20 3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6, d6, d6) \"\"\", postParse=lambda _,", "sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ...,", "DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6, d6) show(d6, d6, d6)", "board and role-playing games, such as: d20 3d20 5d6 + d20 min(d6, d6,", "+ d20 min(d6, d6, d6) maxn(2, d6, d6, d6) (select top 2 of", "in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\":", "\"\"\" Parser for evaluating expressions representing rolls of dice, as used in many", "BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\", ..., min)", "Parser for evaluating expressions representing rolls of dice, as used in many board", "dice_roll_parser.py # # Copyright 
2021, <NAME> # from plusminus import BaseArithmeticParser # fmt:", "min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def", "of 3 d6 rolls) show(d6, d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\",", "d6 rolls) show(d6, d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT,", "and role-playing games, such as: d20 3d20 5d6 + d20 min(d6, d6, d6)", "_ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args:", "a, b: sum(random.randint(1, b) for _ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ...,", "dice, as used in many board and role-playing games, such as: d20 3d20", "rolls of dice, as used in many board and role-playing games, such as:", "values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__", "range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ..., lambda *args: {\"rolls\": list(args),", "as: d20 3d20 5d6 + d20 min(d6, d6, d6) maxn(2, d6, d6, d6)", "d6, d6) (select top 2 of 3 d6 rolls) show(d6, d6, d6) \"\"\"", "show(d6, d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a:", "customize(self): import random self.add_operator(\"d\", 1, BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT,", "many board and role-playing games, such as: d20 3d20 5d6 + d20 min(d6,", "'__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, d6, d6)", "(select top 2 of 3 d6 rolls) show(d6, d6, d6) \"\"\" def customize(self):", "# Copyright 2021, <NAME> # from plusminus import BaseArithmeticParser # fmt: off class", "\"\"\"\\ d20 3d6 d20+3d4 2d100 max(d6, 
d6, d6) show(d6, d6, d6) \"\"\", postParse=lambda", "# fmt: on if __name__ == '__main__': parser = DiceRollParser() parser.runTests( \"\"\"\\ d20", "# # dice_roll_parser.py # # Copyright 2021, <NAME> # from plusminus import BaseArithmeticParser", "Copyright 2021, <NAME> # from plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser):", "for evaluating expressions representing rolls of dice, as used in many board and", "3 d6 rolls) show(d6, d6, d6) \"\"\" def customize(self): import random self.add_operator(\"d\", 1,", "of dice, as used in many board and role-playing games, such as: d20", "d20 3d20 5d6 + d20 min(d6, d6, d6) maxn(2, d6, d6, d6) (select", "= sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values, \"maxn\": ret, \"sum\": sum(ret)} self.add_function(\"maxn\",", "games, such as: d20 3d20 5d6 + d20 min(d6, d6, d6) maxn(2, d6,", "random.randint(1, a)) self.add_operator(\"d\", 2, BaseArithmeticParser.LEFT, lambda a, b: sum(random.randint(1, b) for _ in", "list(args), \"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\": n,", "# # Copyright 2021, <NAME> # from plusminus import BaseArithmeticParser # fmt: off", "b) for _ in range(a))) self.add_function(\"min\", ..., min) self.add_function(\"max\", ..., max) self.add_function(\"show\", ...,", "lambda *args: {\"rolls\": list(args), \"sum\": sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n]", "expressions representing rolls of dice, as used in many board and role-playing games,", "self.add_function(\"maxn\", ..., maxn) # fmt: on if __name__ == '__main__': parser = DiceRollParser()", "# fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls of", "fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for evaluating expressions representing rolls of dice,", "\"\"\" def customize(self): import random self.add_operator(\"d\", 1, 
BaseArithmeticParser.RIGHT, lambda a: random.randint(1, a)) self.add_operator(\"d\",", "d6, d6) maxn(2, d6, d6, d6) (select top 2 of 3 d6 rolls)", "sum(args)}) def maxn(n, *values): ret = sorted(values, reverse=True)[:n] return {\"n\": n, \"rolls\": values,", "# from plusminus import BaseArithmeticParser # fmt: off class DiceRollParser(BaseArithmeticParser): \"\"\" Parser for" ]
[ "\"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] }) # take a", "a new data frame import numpy as np import pandas as pd #", "\"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more than one high", "# # name ch02.py # 数据帧(类似excel) # create a new data frame import", "numpy as np import pandas as pd # create a new data frame", "structure & contents print(assets_df) assets_df.head() # show a \"slice\" just the operating systmes", "column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with", "& high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"],", "one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into network 'zones' based", "a look at the data frame structure & contents print(assets_df) assets_df.head() # show", "\"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more than one high vulnerabilty assets_df[assets_df.highvulns", "address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get one final view assets_df.head()", "\"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] }) # take", "assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get one final view assets_df.head() print(assets_df)", "of hosts & high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\",", "just the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a new column assets_df['ip']", "with more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into", "add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show", "only nodes with more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide", "assets_df['ip'] = 
[\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more", "frame of hosts & high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\",", "frame import numpy as np import pandas as pd # create a new", "assets_df.head() # show a \"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head()) #", "operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a new column assets_df['ip'] = [\"192.168.1.5\",", "IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get one final view", "\"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2,", "into network 'zones' based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\")", "\"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0]", "vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\",", "print(assets_df) assets_df.head() # show a \"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head())", "ch02.py # 数据帧(类似excel) # create a new data frame import numpy as np", "as pd # create a new data frame of hosts & high vuln", "more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into network", "& contents print(assets_df) assets_df.head() # show a \"slice\" just the operating systmes assets_df.os.head()", "assets_df[assets_df.highvulns > 1].head() # divide nodes into network 'zones' based on IP address", "contents print(assets_df) assets_df.head() # show a \"slice\" just the operating systmes assets_df.os.head() #", "<reponame>verazuo/Code-For-Data-driven-Security # # name ch02.py # 数据帧(类似excel) # create a new data frame", "\"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] }) # 
take a look at", "= [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more than", "# divide nodes into network 'zones' based on IP address assets_df['zones'] = np.where(", "\"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\":", "# show a \"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add", "divide nodes into network 'zones' based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"),", "look at the data frame structure & contents print(assets_df) assets_df.head() # show a", "# add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] #", "data frame structure & contents print(assets_df) assets_df.head() # show a \"slice\" just the", "pd # create a new data frame of hosts & high vuln counts", "\"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a new column", "create a new data frame of hosts & high vuln counts assets_df =", "}) # take a look at the data frame structure & contents print(assets_df)", "show only nodes with more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() #", "than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into network 'zones'", "import pandas as pd # create a new data frame of hosts &", "1].head() # divide nodes into network 'zones' based on IP address assets_df['zones'] =", "pandas as pd # create a new data frame of hosts & high", "new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes", "# take a look at the data frame structure & contents print(assets_df) assets_df.head()", "[1, 0, 2, 0, 0] }) # take a look at the data", "assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", 
\"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\",", "import numpy as np import pandas as pd # create a new data", "\"10.2.7.7\"] # show only nodes with more than one high vulnerabilty assets_df[assets_df.highvulns >", "pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"],", "create a new data frame import numpy as np import pandas as pd", "\"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0,", "counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\",", "[\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more than one", "as np import pandas as pd # create a new data frame of", "[\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] }) #", "\"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] })", "print(assets_df.os.head()) # add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"]", "name ch02.py # 数据帧(类似excel) # create a new data frame import numpy as", "\"highvulns\": [1, 0, 2, 0, 0] }) # take a look at the", "= pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\",", "high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into network 'zones' based on", "high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\":", "[\"danube\", \"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1,", "show a \"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a", "# 
print(assets_df.os.head()) # add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\",", "0] }) # take a look at the data frame structure & contents", "new data frame of hosts & high vuln counts assets_df = pd.DataFrame({ \"name\":", "np import pandas as pd # create a new data frame of hosts", "systmes assets_df.os.head() # print(assets_df.os.head()) # add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\",", "# create a new data frame import numpy as np import pandas as", "# 数据帧(类似excel) # create a new data frame import numpy as np import", "a \"slice\" just the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a new", "0, 0] }) # take a look at the data frame structure &", "\"gander\", \"ganges\", \"mekong\", \"orinoco\"], \"os\": [\"W2K8\", \"RHEL5\", \"W2K8\", \"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0,", "a new data frame of hosts & high vuln counts assets_df = pd.DataFrame({", "nodes with more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes", "# show only nodes with more than one high vulnerabilty assets_df[assets_df.highvulns > 1].head()", "\"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only nodes with more than one high vulnerabilty", "new data frame import numpy as np import pandas as pd # create", "'zones' based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get", "hosts & high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\", \"gander\", \"ganges\", \"mekong\",", "a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\", \"10.2.7.6\", \"10.2.7.7\"] # show only", "data frame of hosts & high vuln counts assets_df = pd.DataFrame({ \"name\": [\"danube\",", "data frame import numpy as np import pandas as pd # create a", "network 'zones' based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") #", "at 
the data frame structure & contents print(assets_df) assets_df.head() # show a \"slice\"", "on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get one final", "\"RHEL5\", \"RHEL5\"], \"highvulns\": [1, 0, 2, 0, 0] }) # take a look", "# name ch02.py # 数据帧(类似excel) # create a new data frame import numpy", "the operating systmes assets_df.os.head() # print(assets_df.os.head()) # add a new column assets_df['ip'] =", "nodes into network 'zones' based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\",", "2, 0, 0] }) # take a look at the data frame structure", "数据帧(类似excel) # create a new data frame import numpy as np import pandas", "0, 2, 0, 0] }) # take a look at the data frame", "# create a new data frame of hosts & high vuln counts assets_df", "the data frame structure & contents print(assets_df) assets_df.head() # show a \"slice\" just", "> 1].head() # divide nodes into network 'zones' based on IP address assets_df['zones']", "frame structure & contents print(assets_df) assets_df.head() # show a \"slice\" just the operating", "based on IP address assets_df['zones'] = np.where( assets_df.ip.str.startswith(\"192\"), \"Zone1\", \"Zone2\") # get one", "take a look at the data frame structure & contents print(assets_df) assets_df.head() #", "assets_df.os.head() # print(assets_df.os.head()) # add a new column assets_df['ip'] = [\"192.168.1.5\", \"10.2.7.5\", \"192.168.1.7\",", "vulnerabilty assets_df[assets_df.highvulns > 1].head() # divide nodes into network 'zones' based on IP" ]
[ "with the server CONNECTED = auto() #: Disconnected state DISCONNECTED = auto() #:", "the result of an asynchronous task Emit signals about errors if the *future's*", "the method immediately returns, and then the results or the potential errors during", "subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url: CometD service", "asynchronous message sending task :param response: A response associated with the *future* :param", "async for message in client: # emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message)", "raise InvalidStateError(\"Can't send messages in a non-connected \" \"state.\") if self._client is None:", "= auto() #: Disconnected state DISCONNECTED = auto() #: Disconnected state due to", "used to schedule tasks. If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used to", "connect to the service async with aiocometd.Client(self._url, loop=self._loop) as client: # set the", "import partial from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures", "the *coro* \"\"\" if loop is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro,", "QObject # type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co =", "when the client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited", "= future.result() # notify listeners that a response has been received response.finished.emit() #", "= asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback) return", "from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject", "changed then don't do anything if new_state != self._state: self._state = new_state #", "messages 
self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute self._client = None #", "import aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal,", "to the service async with aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous", "Emited when the response has been received finished = pyqtSignal() # pylint: enable=too-few-public-methods", "state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result", "new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the CometD service", "ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState:", "aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject #", "# type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\",", "\\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro* and set *callback*", "None result: Optional[JsonObject] = None #: Emited when the response has been received", "listeners that the state changed self.state_changed.emit(self._state) # emit state specific signals if new_state", "sent by the service as long as the client is open \"\"\" #", "response associated with the message \"\"\" # check that the client has been", "shared by Qt's and asyncio's events, the concurrent.futures.Future can't be awaited, blocking is", "in synchronous code if it runs on a quamash event loop. Since the", "object \\ associated with *coro* :param loop: The event loop on which the", "*future's* result is an exception. 
:param future: A future associated with the asynchronous", "asyncio's events, the concurrent.futures.Future can't be awaited, blocking is not allowed. Instead, this", "do anything if new_state != self._state: self._state = new_state # notify listeners that", "service and retreive the messages sent by the service as long as the", "self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED:", "subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. If *loop* is", "not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected", "raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return", "potential errors during the asynchronous operation are broadcasted with signals. 
\"\"\" #: Signal", "*coro* :param loop: The event loop on which the *coro* should be scheduled", "of a sent CometD message\"\"\" #: Contains the exception object if finished with", "None #: Emited when the response has been received finished = pyqtSignal() #", "that the client has been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send", "ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the server CONNECTED = auto() #:", "specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to", "asynchronous task \"\"\" # set the error or result attributes of the response", "= future.exception() else: response.result = future.result() # notify listeners that a response has", "the state of the client to *state*\"\"\" # if the state didn't changed", "is finished :param coro: A coroutine :param callback: A callback function called with", "callback is not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\"", "url: CometD service url :param subscriptions: A list of channels to which the", "response: A response associated with the *future* :param future: A future associated with", "pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co", "The event loop on which the *coro* should be scheduled :return: The future", "= None) -> None: \"\"\" :param url: CometD service url :param subscriptions: A", "pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] =", "disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,)", "future = 
asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback) return future @unique", "T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]]", "Any import concurrent.futures as futures from contextlib import suppress import aiocometd from aiocometd.typing", "for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self) ->", "the server CONNECTED = auto() #: Disconnected state DISCONNECTED = auto() #: Disconnected", "def publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given", "self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of", "client is already connected then it does nothing. \"\"\" # don't do anything", "non-connected \" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response =", "exception. :param future: A future associated with the asynchronous task \"\"\" # clear", "with signals. \"\"\" #: Signal emited when the client's state is changed state_changed", "error = pyqtSignal(Exception) #: Signal emited when a message has been received from", "client states\"\"\" #: Connected with the server CONNECTED = auto() #: Disconnected state", "InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response", "to send to the server :return: Return the response associated with the message", ":param data: Data to send to the server :return: Return the response associated", "errors during the asynchronous operation are broadcasted with signals. 
\"\"\" #: Signal emited", "= ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the CometD service If", "if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected \" \"state.\")", ":obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited when a message has been", "self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected \" \"state.\") if", "ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous", "similarly to how asynchronous network operations are implemented in Qt. Namely, on a", "state error = pyqtSignal(Exception) #: Signal emited when a message has been received", "didn't changed then don't do anything if new_state != self._state: self._state = new_state", "ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an asynchronous", "sending task :param response: A response associated with the *future* :param future: A", "listening for messages The function returns immediately. On success the :obj:`~CometdClient.connected` signal is", ":obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. If *loop* is ``None`` then :func:`asyncio.get_event_loop` is", "data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given *channel* :param channel: Name", ":param response: A response associated with the *future* :param future: A future associated", "= None # put the client into a disconnected state self.state = ClientState.DISCONNECTED", "task has been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\")", "which the *coro* should be scheduled :return: The future associated with the *coro*", "task Emit signals about errors if the *future's* result is an exception. 
:param", "the task member self._connect_task = None error = None with suppress(futures.CancelledError): error =", "disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the", "client to *state*\"\"\" # if the state didn't changed then don't do anything", "notify listeners that the state changed self.state_changed.emit(self._state) # emit state specific signals if", "pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation This class", "\"\"\"Synchronous CometD client implementation This class enables the asynchronous Client class from aiocometd", "concurrent.futures.Future can't be awaited, blocking is not allowed. Instead, this class is implemented", "\"\"\" :param url: CometD service url :param subscriptions: A list of channels to", "CometD service url :param subscriptions: A list of channels to which the client", "result of an asynchronous message sending task :param response: A response associated with", "class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the server CONNECTED = auto()", "\"\"\"Schedule the execution of the given *coro* and set *callback* to be called", "Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the", "unique, auto import asyncio from functools import partial from typing import Optional, Iterable,", "CONNECTED = auto() #: Disconnected state DISCONNECTED = auto() #: Disconnected state due", "# if the state didn't changed then don't do anything if new_state !=", "client into a connected state self.state = ClientState.CONNECTED # listen for incoming messages", "{ ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, 
notify=state_changed) def", "None: \"\"\"Evaluate the result of an asynchronous task Emit signals about errors if", "auto() #: Disconnected state DISCONNECTED = auto() #: Disconnected state due to an", "was completed normally or it exited with an exception if future.exception() is not", "None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the CometD", "-> None: \"\"\"Connect to the CometD service and start listening for messages The", "errors if the *future's* result is an exception. :param future: A future associated", "the server :return: Return the response associated with the message \"\"\" # check", "in self._subscriptions: await client.subscribe(subscription) # put the client into a connected state self.state", "if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data),", "when the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal", "asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum):", "PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions", "service and start listening for messages The function returns immediately. 
On success the", "don't do anything if already connected if self.state != ClientState.CONNECTED: # schedule the", "future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the server CONNECTED", "self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the CometD service", "with *coro* :param loop: The event loop on which the *coro* should be", "# listen for incoming messages with suppress(futures.CancelledError): async for message in client: #", "pyqtSignal, pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError", ") async def _connect(self) -> None: \"\"\"Connect to the CometD service and retreive", "message \"\"\" # check that the client has been initialized if self.state !=", "on which the *coro* should be scheduled :return: The future associated with the", "the *future's* result is an exception. :param future: A future associated with the", "*data* to the given *channel* :param channel: Name of the channel :param data:", "set the error or result attributes of the response depending on # whether", "\"\"\" if loop is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if", "-> None: \"\"\" :param url: CometD service url :param subscriptions: A list of", "received from the server message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str],", "the coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async def", "None with suppress(futures.CancelledError): error = future.exception() if error is not None: self.state =", "from aiocometd to be used in synchronous code if it runs on a", "it was completed normally or it exited with an exception if future.exception() is", "None: response.error = future.exception() else: response.result = future.result() # notify listeners that a", 
"Disconnected state due to an error ERROR = auto() # pylint: disable=too-few-public-methods class", "response associated with the *future* :param future: A future associated with the asynchronous", "by Qt's and asyncio's events, the concurrent.futures.Future can't be awaited, blocking is not", "message\"\"\" #: Contains the exception object if finished with an error, otherwise None", "of the response depending on # whether it was completed normally or it", "state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect", "#: Emited when the response has been received finished = pyqtSignal() # pylint:", "\"\"\"Evaluate the result of an asynchronous message sending task :param response: A response", "of the server when finished successfully, #: otherwise None result: Optional[JsonObject] = None", "error or result attributes of the response depending on # whether it was", "client\"\"\" from enum import IntEnum, unique, auto import asyncio from functools import partial", "put the client into a connected state self.state = ClientState.CONNECTED # listen for", "#: Signal emited when the client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception)", "ignore def state(self, new_state: ClientState) -> None: \"\"\"Set the state of the client", "self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str,", "# clear the task member self._connect_task = None error = None with suppress(futures.CancelledError):", "Callable, Any import concurrent.futures as futures from contextlib import suppress import aiocometd from", "concurrent.futures as futures from contextlib import suppress import aiocometd from aiocometd.typing import JsonObject", "the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited", "= loop or asyncio.get_event_loop() 
self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals =", "# subscribe to all the channels for subscription in self._subscriptions: await client.subscribe(subscription) #", "message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None)", "operations are implemented in Qt. Namely, on a method call the operation is", "\"\"\" # don't do anything if already connected if self.state != ClientState.CONNECTED: #", "response of the server when finished successfully, #: otherwise None result: Optional[JsonObject] =", "signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute self._client", "-> None: \"\"\"Evaluate the result of an asynchronous message sending task :param response:", "how asynchronous network operations are implemented in Qt. Namely, on a method call", "!= ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected \" \"state.\") if self._client", "client's state is changed state_changed = pyqtSignal(ClientState) #: Signal emited when the client", "should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. If", "class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of a sent CometD message\"\"\"", "def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\"", "is an exception. :param future: A future associated with the asynchronous task \"\"\"", "does nothing. 
\"\"\" if self.state == ClientState.CONNECTED: # check that the task has", "raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) ->", "client enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal emited when", "self.state == ClientState.CONNECTED: # check that the task has been initialized if self._connect_task", "new_state: ClientState) -> None: \"\"\"Set the state of the client to *state*\"\"\" #", "about errors if the *future's* result is an exception. :param future: A future", "already connected then it does nothing. \"\"\" # don't do anything if already", "-> None: \"\"\"Set the state of the client to *state*\"\"\" # if the", "the client is open \"\"\" # connect to the service async with aiocometd.Client(self._url,", "asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected,", "import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback:", "# clear the asynchronous client attribute self._client = None # put the client", "\"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to", "not None: response.error = future.exception() else: response.result = future.result() # notify listeners that", "Signal emited when a message has been received from the server message_received =", "the execution of the given *coro* and set *callback* to be called when", "the *coro* should be scheduled :return: The future associated with the *coro* \"\"\"", "returns, and then the results or the potential errors during the asynchronous operation", "\"\"\"CometD client states\"\"\" #: Connected with the server CONNECTED = auto() #: Disconnected", "The function returns immediately. 
On success the :obj:`~CometdClient.connected` signal is emited or the", "= pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore", "@unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the server CONNECTED =", "into a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None:", "loop: The event loop on which the *coro* should be scheduled :return: The", "self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the CometD service and start", "= None #: Emited when the response has been received finished = pyqtSignal()", "the client should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule", "JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore", "and retreive the messages sent by the service as long as the client", "open \"\"\" # connect to the service async with aiocometd.Client(self._url, loop=self._loop) as client:", "#: Signal emited when a message has been received from the server message_received", "await client.subscribe(subscription) # put the client into a connected state self.state = ClientState.CONNECTED", "listen for incoming messages with suppress(futures.CancelledError): async for message in client: # emit", "function returns immediately. 
On success the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error`", "import Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as futures from contextlib", "received finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): #", "enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited when the", "run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\")", "self._subscriptions: await client.subscribe(subscription) # put the client into a connected state self.state =", "task :param response: A response associated with the *future* :param future: A future", "with the asynchronous task \"\"\" # clear the task member self._connect_task = None", "\"\"\"The asynchronous result of a sent CometD message\"\"\" #: Contains the exception object", "InvalidStateError(\"Can't send messages in a non-connected \" \"state.\") if self._client is None: raise", "the CometD service and retreive the messages sent by the service as long", "normally or it exited with an exception if future.exception() is not None: response.error", "str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url: CometD", "-> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro* and set *callback* to", "state due to an error ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject):", "not allowed. 
Instead, this class is implemented similarly to how asynchronous network operations", "disconnected = pyqtSignal() #: Signal emited when the client enters the :obj:`~ClientState.ERROR` state", "connect_(self) -> None: \"\"\"Connect to the CometD service and start listening for messages", "If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used to get the default event", "Client class from aiocometd to be used in synchronous code if it runs", "if it runs on a quamash event loop. Since the event loop is", "= { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed)", "scheduled :return: The future associated with the *coro* \"\"\" if loop is None:", "broadcasted with signals. \"\"\" #: Signal emited when the client's state is changed", "blocking is not allowed. Instead, this class is implemented similarly to how asynchronous", "state self.state = ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError): async for", "notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of the client\"\"\" return self._state @state.setter", "= run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self) -> None: \"\"\"Connect to", "# notify listeners that a response has been received response.finished.emit() # pylint: disable=too-many-instance-attributes", "from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as futures", "None: \"\"\"Disconnect from the CometD service If the client is not connected it", "*coro* \"\"\" if loop is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop)", "= None #: Contains the response of the server when finished successfully, #:", "for message in client: # emit signal about received messages 
self._loop.call_soon_threadsafe(self.message_received.emit, message) #", "A coroutine :param callback: A callback function called with the future object \\", "events, the concurrent.futures.Future can't be awaited, blocking is not allowed. Instead, this class", "the client to *state*\"\"\" # if the state didn't changed then don't do", "if new_state != self._state: self._state = new_state # notify listeners that the state", "self._connect(), self._on_connect_done, self._loop ) async def _connect(self) -> None: \"\"\"Connect to the CometD", "\"\"\" # clear the task member self._connect_task = None error = None with", "# set the error or result attributes of the response depending on #", "# pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD", "state DISCONNECTED = auto() #: Disconnected state due to an error ERROR =", "functools import partial from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any import", "the operation is started and the method immediately returns, and then the results", "aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous client attribute self._client = client", "is already connected then it does nothing. \"\"\" # don't do anything if", "Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution", "code if it runs on a quamash event loop. Since the event loop", "and start listening for messages The function returns immediately. 
On success the :obj:`~CometdClient.connected`", "a non-connected \" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response", "initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected \"", "state of the client to *state*\"\"\" # if the state didn't changed then", "to an error ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type:", "\"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an asynchronous task Emit signals about", "service as long as the client is open \"\"\" # connect to the", "error = future.exception() if error is not None: self.state = ClientState.ERROR self.error.emit(error) def", "InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) -> MessageResponse:", "to the CometD service and start listening for messages The function returns immediately.", "method immediately returns, and then the results or the potential errors during the", "synchronous code if it runs on a quamash event loop. Since the event", "Namely, on a method call the operation is started and the method immediately", "CometD client implementation This class enables the asynchronous Client class from aiocometd to", "Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url: CometD service url", "called with the future object \\ associated with *coro* :param loop: The event", "with the message \"\"\" # check that the client has been initialized if", "*callback* to be called when the *coro* is finished :param coro: A coroutine", "This class enables the asynchronous Client class from aiocometd to be used in", "to how asynchronous network operations are implemented in Qt. 
Namely, on a method", "state connected = pyqtSignal() #: Signal emited when the client enters the #:", "\"\"\" # set the error or result attributes of the response depending on", "or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED:", "emit state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None:", "associated with the message \"\"\" # check that the client has been initialized", "if finished with an error, otherwise None error: Optional[BaseException] = None #: Contains", "the potential errors during the asynchronous operation are broadcasted with signals. \"\"\" #:", "the service as long as the client is open \"\"\" # connect to", "is not allowed. Instead, this class is implemented similarly to how asynchronous network", "the server message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop]", "on # whether it was completed normally or it exited with an exception", "A list of channels to which the client should \\ subscribe :param loop:", "Awaitable, Callable, Any import concurrent.futures as futures from contextlib import suppress import aiocometd", "results or the potential errors during the asynchronous operation are broadcasted with signals.", "CometD service If the client is not connected it does nothing. \"\"\" if", "channel: Name of the channel :param data: Data to send to the server", "\\ associated with *coro* :param loop: The event loop on which the *coro*", "pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore # pylint:", "= asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback) return future @unique class", "the default event loop. 
\"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop", "the client\"\"\" return self._state @state.setter # type: ignore def state(self, new_state: ClientState) ->", "the results or the potential errors during the asynchronous operation are broadcasted with", "on failure. If the client is already connected then it does nothing. \"\"\"", "aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty,", "CometD message\"\"\" #: Contains the exception object if finished with an error, otherwise", "Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of the", "message in client: # emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear", "associated with the asynchronous task \"\"\" # clear the task member self._connect_task =", "None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task:", "operation is started and the method immediately returns, and then the results or", "the state didn't changed then don't do anything if new_state != self._state: self._state", "whether it was completed normally or it exited with an exception if future.exception()", "*state*\"\"\" # if the state didn't changed then don't do anything if new_state", ":param future: A future associated with the asynchronous task \"\"\" # set the", "# put the client into a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self,", "about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute self._client =", "the asynchronous task \"\"\" # set the error or result attributes of the", "as client: # set the asynchronous client attribute 
self._client = client # subscribe", "Contains the response of the server when finished successfully, #: otherwise None result:", "operation are broadcasted with signals. \"\"\" #: Signal emited when the client's state", "client is open \"\"\" # connect to the service async with aiocometd.Client(self._url, loop=self._loop)", ":obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited when the client enters the", "the task has been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \"", "\"\"\" if self.state == ClientState.CONNECTED: # check that the task has been initialized", "-> ClientState: \"\"\"Current state of the client\"\"\" return self._state @state.setter # type: ignore", "Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\":", "= None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro* and", ":obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal on failure. 
If the client", "server :return: Return the response associated with the message \"\"\" # check that", "state changed self.state_changed.emit(self._state) # emit state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit()", "as futures from contextlib import suppress import aiocometd from aiocometd.typing import JsonObject #", "import asyncio from functools import partial from typing import Optional, Iterable, TypeVar, Awaitable,", "#: state connected = pyqtSignal() #: Signal emited when the client enters the", "\"\"\"Publish *data* to the given *channel* :param channel: Name of the channel :param", "None error = None with suppress(futures.CancelledError): error = future.exception() if error is not", "channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given *channel* :param", "#: Disconnected state DISCONNECTED = auto() #: Disconnected state due to an error", "with aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous client attribute self._client =", "ignore \"\"\"Synchronous CometD client implementation This class enables the asynchronous Client class from", "is shared by Qt's and asyncio's events, the concurrent.futures.Future can't be awaited, blocking", "*channel* :param channel: Name of the channel :param data: Data to send to", "a connected state self.state = ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError):", "def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\", "def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an asynchronous", "emited when the client's state is changed state_changed = pyqtSignal(ClientState) #: Signal emited", "changed state_changed = pyqtSignal(ClientState) #: Signal emited when the client enters the 
:obj:`~ClientState.CONNECTED`", "called when the *coro* is finished :param coro: A coroutine :param callback: A", "changed self.state_changed.emit(self._state) # emit state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def", "Signal emited when the client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #:", "the client into a connected state self.state = ClientState.CONNECTED # listen for incoming", "quamash event loop. Since the event loop is shared by Qt's and asyncio's", "#: Disconnected state due to an error ERROR = auto() # pylint: disable=too-few-public-methods", "messages sent by the service as long as the client is open \"\"\"", "the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited when the client", "\"\"\"Set the state of the client to *state*\"\"\" # if the state didn't", "is ``None`` then :func:`asyncio.get_event_loop` is used to get the default event loop. \"\"\"", "finished successfully, #: otherwise None result: Optional[JsonObject] = None #: Emited when the", "#: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited when the client enters", "\"\"\"Current state of the client\"\"\" return self._state @state.setter # type: ignore def state(self,", "run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ ->", "successfully, #: otherwise None result: Optional[JsonObject] = None #: Emited when the response", "# type: ignore def state(self, new_state: ClientState) -> None: \"\"\"Set the state of", "as long as the client is open \"\"\" # connect to the service", "the response associated with the message \"\"\" # check that the client has", "messages with suppress(futures.CancelledError): async for message in client: # emit signal about received", "finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint: 
disable=too-many-instance-attributes class CometdClient(QObject): # type:", "messages The function returns immediately. On success the :obj:`~CometdClient.connected` signal is emited or", "= pyqtSignal() #: Signal emited when the client enters the #: :obj:`~ClientState.DISCONNECTED` state", "signal on failure. If the client is already connected then it does nothing.", "are implemented in Qt. Namely, on a method call the operation is started", "emited when a message has been received from the server message_received = pyqtSignal(dict)", "\"\"\" # check that the client has been initialized if self.state != ClientState.CONNECTED:", "def disconnect_(self) -> None: \"\"\"Disconnect from the CometD service If the client is", "= auto() #: Disconnected state due to an error ERROR = auto() #", "import concurrent.futures as futures from contextlib import suppress import aiocometd from aiocometd.typing import", "self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current", "with the *coro* \"\"\" if loop is None: loop = asyncio.get_event_loop() future =", "#: Signal emited when the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected =", "error, otherwise None error: Optional[BaseException] = None #: Contains the response of the", "= client # subscribe to all the channels for subscription in self._subscriptions: await", "check that the client has been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't", "connected state self.state = ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError): async", "state disconnected = pyqtSignal() #: Signal emited when the client enters the :obj:`~ClientState.ERROR`", "super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client:", "the client is not connected it 
does nothing. \"\"\" if self.state == ClientState.CONNECTED:", "of the client to *state*\"\"\" # if the state didn't changed then don't", "future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an asynchronous message sending task", "class enables the asynchronous Client class from aiocometd to be used in synchronous", "returns immediately. On success the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal", "the channels for subscription in self._subscriptions: await client.subscribe(subscription) # put the client into", "of the channel :param data: Data to send to the server :return: Return", "does nothing. \"\"\" # don't do anything if already connected if self.state !=", "auto import asyncio from functools import partial from typing import Optional, Iterable, TypeVar,", "= auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result", "initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self,", "with the future object \\ associated with *coro* :param loop: The event loop", "async def _connect(self) -> None: \"\"\"Connect to the CometD service and retreive the", "#: Signal emited when the client's state is changed state_changed = pyqtSignal(ClientState) #:", "been received from the server message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions:", "for incoming messages with suppress(futures.CancelledError): async for message in client: # emit signal", "already connected if self.state != ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task", "state of the client\"\"\" return self._state @state.setter # type: ignore def state(self, new_state:", "given *coro* and set *callback* to be called when the *coro* is finished", "Instead, this class is implemented similarly to how asynchronous 
network operations are implemented", "TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None,", "error: Optional[BaseException] = None #: Contains the response of the server when finished", "*future* :param future: A future associated with the asynchronous task \"\"\" # set", "The future associated with the *coro* \"\"\" if loop is None: loop =", ":obj:`~CometdClient.error` signal on failure. If the client is already connected then it does", "result is an exception. :param future: A future associated with the asynchronous task", "clear the asynchronous client attribute self._client = None # put the client into", "#: Signal emited when the client enters the :obj:`~ClientState.CONNECTED` #: state connected =", "the client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited when", "loop. \"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop = loop or", "import suppress import aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore", "self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state", "emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute", "= future.exception() if error is not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self)", "ClientState.CONNECTED: # check that the task has been initialized if self._connect_task is None:", "enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation", "or the potential errors during the asynchronous operation are broadcasted with signals. 
\"\"\"", "list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED", "@pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of the client\"\"\" return self._state", "callback function called with the future object \\ associated with *coro* :param loop:", "default event loop. \"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop =", "received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute self._client = None", "that the task has been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task", "self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) ->", "signals. \"\"\" #: Signal emited when the client's state is changed state_changed =", "= pyqtSignal() #: Signal emited when the client enters the :obj:`~ClientState.ERROR` state error", "immediately. On success the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal on", "when the response has been received finished = pyqtSignal() # pylint: enable=too-few-public-methods #", "\"\"\" #: Signal emited when the client's state is changed state_changed = pyqtSignal(ClientState)", "-> MessageResponse: \"\"\"Publish *data* to the given *channel* :param channel: Name of the", "exception if future.exception() is not None: response.error = future.exception() else: response.result = future.result()", "_on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an asynchronous message", "attributes of the response depending on # whether it was completed normally or", "then it does nothing. 
\"\"\" # don't do anything if already connected if", "def _connect(self) -> None: \"\"\"Connect to the CometD service and retreive the messages", "the client enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal emited", "allowed. Instead, this class is implemented similarly to how asynchronous network operations are", "is changed state_changed = pyqtSignal(ClientState) #: Signal emited when the client enters the", "the given *coro* and set *callback* to be called when the *coro* is", "DISCONNECTED = auto() #: Disconnected state due to an error ERROR = auto()", "be awaited, blocking is not allowed. Instead, this class is implemented similarly to", "from the server message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop:", "client should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks.", "set the asynchronous client attribute self._client = client # subscribe to all the", "connected then it does nothing. 
\"\"\" # don't do anything if already connected", "# pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of a", "in a non-connected \" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\")", "None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with", "disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module", "response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response:", "coroutine :param callback: A callback function called with the future object \\ associated", "Signal emited when the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal()", "if future.exception() is not None: response.error = future.exception() else: response.result = future.result() #", "the given *channel* :param channel: Name of the channel :param data: Data to", "} self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state", "future associated with the asynchronous task \"\"\" # clear the task member self._connect_task", "Name of the channel :param data: Data to send to the server :return:", "MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future:", "= pyqtSignal(ClientState) #: Signal emited when the client enters the :obj:`~ClientState.CONNECTED` #: state", "typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as futures from", "A response associated with the *future* :param 
future: A future associated with the", "sent CometD message\"\"\" #: Contains the exception object if finished with an error,", "the CometD service and start listening for messages The function returns immediately. On", "run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self) -> None: \"\"\"Connect to the", "the *coro* is finished :param coro: A coroutine :param callback: A callback function", "be called when the *coro* is finished :param coro: A coroutine :param callback:", "of an asynchronous message sending task :param response: A response associated with the", "# pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation This", "disconnect_(self) -> None: \"\"\"Disconnect from the CometD service If the client is not", "an error ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore", "with an error, otherwise None error: Optional[BaseException] = None #: Contains the response", "with the *future* :param future: A future associated with the asynchronous task \"\"\"", "the asynchronous task \"\"\" # clear the task member self._connect_task = None error", "to schedule tasks. If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used to get", "return self._state @state.setter # type: ignore def state(self, new_state: ClientState) -> None: \"\"\"Set", "the CometD service If the client is not connected it does nothing. 
\"\"\"", "and then the results or the potential errors during the asynchronous operation are", "data: Data to send to the server :return: Return the response associated with", "error ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The", "import IntEnum, unique, auto import asyncio from functools import partial from typing import", "class from aiocometd to be used in synchronous code if it runs on", "future.exception() if error is not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) ->", "\"\"\" # connect to the service async with aiocometd.Client(self._url, loop=self._loop) as client: #", "event loop. \"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop = loop", "an exception. :param future: A future associated with the asynchronous task \"\"\" #", "None: \"\"\"Connect to the CometD service and retreive the messages sent by the", "InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"],", "callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule", "loop on which the *coro* should be scheduled :return: The future associated with", "Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url: CometD service url :param subscriptions:", "for messages The function returns immediately. 
On success the :obj:`~CometdClient.connected` signal is emited", "all the channels for subscription in self._subscriptions: await client.subscribe(subscription) # put the client", "None # put the client into a disconnected state self.state = ClientState.DISCONNECTED def", "None: raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop)", "None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the", "partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None:", "\"\"\"Evaluate the result of an asynchronous task Emit signals about errors if the", "of the client\"\"\" return self._state @state.setter # type: ignore def state(self, new_state: ClientState)", "future.result() # notify listeners that a response has been received response.finished.emit() # pylint:", "has been received from the server message_received = pyqtSignal(dict) def __init__(self, url: str,", "start listening for messages The function returns immediately. On success the :obj:`~CometdClient.connected` signal", "# connect to the service async with aiocometd.Client(self._url, loop=self._loop) as client: # set", "asyncio from functools import partial from typing import Optional, Iterable, TypeVar, Awaitable, Callable,", "loop) if callback is not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD", "the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal emited when the client", "suppress import aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import", "during the asynchronous operation are broadcasted with signals. 
\"\"\" #: Signal emited when", "str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given *channel* :param channel:", "TypeVar, Awaitable, Callable, Any import concurrent.futures as futures from contextlib import suppress import", "state didn't changed then don't do anything if new_state != self._state: self._state =", "implementation This class enables the asynchronous Client class from aiocometd to be used", "channels to which the client should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>`", "asynchronous task Emit signals about errors if the *future's* result is an exception.", ":param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. If *loop* is ``None``", "the asynchronous operation are broadcasted with signals. \"\"\" #: Signal emited when the", "not connected it does nothing. \"\"\" if self.state == ClientState.CONNECTED: # check that", "for subscription in self._subscriptions: await client.subscribe(subscription) # put the client into a connected", "class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation This class enables the", "schedule the coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async", "self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the", "response.error = future.exception() else: response.result = future.result() # notify listeners that a response", "self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals", "don't do anything if new_state != self._state: self._state = new_state # notify listeners", "CometD service and retreive the messages sent by the service as long as", "client attribute self._client = None # put the client into a disconnected state", "the *future* :param future: A 
future associated with the asynchronous task \"\"\" #", "if already connected if self.state != ClientState.CONNECTED: # schedule the coroutine for execution", "client: # set the asynchronous client attribute self._client = client # subscribe to", "= TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] =", "Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as futures from contextlib import suppress", "On success the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal on failure.", "response.result = future.result() # notify listeners that a response has been received response.finished.emit()", "Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro*", "# don't do anything if already connected if self.state != ClientState.CONNECTED: # schedule", "and the method immediately returns, and then the results or the potential errors", "which the client should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to", "def state(self) -> ClientState: \"\"\"Current state of the client\"\"\" return self._state @state.setter #", "client has been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in", "client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited when a", "the response has been received finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint:", "the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal on failure. If the", "can't be awaited, blocking is not allowed. 
Instead, this class is implemented similarly", "_connect(self) -> None: \"\"\"Connect to the CometD service and retreive the messages sent", "self._connect_task = None error = None with suppress(futures.CancelledError): error = future.exception() if error", "of an asynchronous task Emit signals about errors if the *future's* result is", "been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected", "if the state didn't changed then don't do anything if new_state != self._state:", "contextlib import suppress import aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module from", "has been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a", "has been received finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class", "IntEnum, unique, auto import asyncio from functools import partial from typing import Optional,", "asynchronous client attribute self._client = None # put the client into a disconnected", "url :param subscriptions: A list of channels to which the client should \\", "nothing. 
\"\"\" if self.state == ClientState.CONNECTED: # check that the task has been", "_client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod", "data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") ->", "CometD client\"\"\" from enum import IntEnum, unique, auto import asyncio from functools import", "None: \"\"\"Set the state of the client to *state*\"\"\" # if the state", "execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self) -> None:", ":param channel: Name of the channel :param data: Data to send to the", "future.exception() is not None: response.error = future.exception() else: response.result = future.result() # notify", "due to an error ERROR = auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): #", "# emit state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) ->", "the concurrent.futures.Future can't be awaited, blocking is not allowed. Instead, this class is", "self._state: self._state = new_state # notify listeners that the state changed self.state_changed.emit(self._state) #", "response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate", "pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name", "it exited with an exception if future.exception() is not None: response.error = future.exception()", "If the client is not connected it does nothing. \"\"\" if self.state ==", "if the *future's* result is an exception. 
:param future: A future associated with", "the future object \\ associated with *coro* :param loop: The event loop on", "client is not connected it does nothing. \"\"\" if self.state == ClientState.CONNECTED: #", "the channel :param data: Data to send to the server :return: Return the", "*coro* is finished :param coro: A coroutine :param callback: A callback function called", "url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url:", "server CONNECTED = auto() #: Disconnected state DISCONNECTED = auto() #: Disconnected state", "CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation This class enables the asynchronous", "result: Optional[JsonObject] = None #: Emited when the response has been received finished", "method call the operation is started and the method immediately returns, and then", "state is changed state_changed = pyqtSignal(ClientState) #: Signal emited when the client enters", "#: otherwise None result: Optional[JsonObject] = None #: Emited when the response has", "# whether it was completed normally or it exited with an exception if", "pyqtSignal(ClientState) #: Signal emited when the client enters the :obj:`~ClientState.CONNECTED` #: state connected", "is not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from", "Connected with the server CONNECTED = auto() #: Disconnected state DISCONNECTED = auto()", "the event loop is shared by Qt's and asyncio's events, the concurrent.futures.Future can't", "associated with the *coro* \"\"\" if loop is None: loop = asyncio.get_event_loop() future", "the service async with aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous client", "self._url = url self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client]", "function called with the future object \\ associated 
with *coro* :param loop: The", "enables the asynchronous Client class from aiocometd to be used in synchronous code", "is used to get the default event loop. \"\"\" super().__init__() self._url = url", "pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of a sent", "the client is already connected then it does nothing. \"\"\" # don't do", "self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self) -> None: \"\"\"Connect", "as the client is open \"\"\" # connect to the service async with", "response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of", "JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given *channel* :param channel: Name of", "Optional[BaseException] = None #: Contains the response of the server when finished successfully,", "finished :param coro: A coroutine :param callback: A callback function called with the", "ClientState) -> None: \"\"\"Set the state of the client to *state*\"\"\" # if", "if self.state != ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task = run_coro(", "or result attributes of the response depending on # whether it was completed", "from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module from", "should be scheduled :return: The future associated with the *coro* \"\"\" if loop", "anything if already connected if self.state != ClientState.CONNECTED: # schedule the coroutine for", "to the CometD service and retreive the messages sent by the service as", "\"\"\"Disconnect from the CometD service If the client is not connected it does", "connected it does nothing. 
\"\"\" if self.state == ClientState.CONNECTED: # check that the", "asynchronous Client class from aiocometd to be used in synchronous code if it", "coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop ) async def _connect(self)", "an asynchronous task Emit signals about errors if the *future's* result is an", "None: \"\"\"Evaluate the result of an asynchronous message sending task :param response: A", "be scheduled :return: The future associated with the *coro* \"\"\" if loop is", "when the client's state is changed state_changed = pyqtSignal(ClientState) #: Signal emited when", "this class is implemented similarly to how asynchronous network operations are implemented in", "if loop is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback", "None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro* and set", "class is implemented similarly to how asynchronous network operations are implemented in Qt.", "-> None: \"\"\"Disconnect from the CometD service If the client is not connected", "callback: A callback function called with the future object \\ associated with *coro*", "a method call the operation is started and the method immediately returns, and", "if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel:", "A future associated with the asynchronous task \"\"\" # clear the task member", "emited when the client enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #:", "attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def", "future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the", "is None: raise 
InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response),", ":obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal emited when the client enters", "aiocometd to be used in synchronous code if it runs on a quamash", "Disconnected state DISCONNECTED = auto() #: Disconnected state due to an error ERROR", "= ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] =", "_on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an asynchronous task Emit", "result of an asynchronous task Emit signals about errors if the *future's* result", "failure. If the client is already connected then it does nothing. \"\"\" #", "member self._connect_task = None error = None with suppress(futures.CancelledError): error = future.exception() if", "been received finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject):", "used to get the default event loop. 
\"\"\" super().__init__() self._url = url self._subscriptions", "client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #: Signal emited when", "enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited when a message", "incoming messages with suppress(futures.CancelledError): async for message in client: # emit signal about", "return response @staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result", "\"\"\"Synchronous CometD client\"\"\" from enum import IntEnum, unique, auto import asyncio from functools", "pyqtSignal() #: Signal emited when the client enters the :obj:`~ClientState.ERROR` state error =", "#: Connected with the server CONNECTED = auto() #: Disconnected state DISCONNECTED =", "otherwise None error: Optional[BaseException] = None #: Contains the response of the server", "Signal emited when the client enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal()", "client into a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") ->", "of channels to which the client should \\ subscribe :param loop: Event :obj:`loop", "pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous", "it does nothing. 
\"\"\" if self.state == ClientState.CONNECTED: # check that the task", "\" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse()", "!= ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done,", "client.subscribe(subscription) # put the client into a connected state self.state = ClientState.CONNECTED #", "= pyqtSignal(Exception) #: Signal emited when a message has been received from the", "if self.state == ClientState.CONNECTED: # check that the task has been initialized if", "network operations are implemented in Qt. Namely, on a method call the operation", "self._on_connect_done, self._loop ) async def _connect(self) -> None: \"\"\"Connect to the CometD service", "client # subscribe to all the channels for subscription in self._subscriptions: await client.subscribe(subscription)", "event loop is shared by Qt's and asyncio's events, the concurrent.futures.Future can't be", "or the :obj:`~CometdClient.error` signal on failure. 
If the client is already connected then", "service url :param subscriptions: A list of channels to which the client should", "url self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None", "is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data:", "new_state # notify listeners that the state changed self.state_changed.emit(self._state) # emit state specific", "None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject)", "new_state != self._state: self._state = new_state # notify listeners that the state changed", "# type: ignore \"\"\"The asynchronous result of a sent CometD message\"\"\" #: Contains", "the response of the server when finished successfully, #: otherwise None result: Optional[JsonObject]", "coro: A coroutine :param callback: A callback function called with the future object", "ignore \"\"\"The asynchronous result of a sent CometD message\"\"\" #: Contains the exception", "None #: Contains the response of the server when finished successfully, #: otherwise", "to *state*\"\"\" # if the state didn't changed then don't do anything if", "from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro:", "the client into a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\")", "messages in a non-connected \" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client", "with an exception if future.exception() is not None: response.error = future.exception() else: response.result", "with suppress(futures.CancelledError): async for message in client: # emit signal about received messages", "covariant=True) # pylint: 
disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop:", "pyqtSignal(Exception) #: Signal emited when a message has been received from the server", "# check that the task has been initialized if self._connect_task is None: raise", "associated with the asynchronous task \"\"\" # set the error or result attributes", "completed normally or it exited with an exception if future.exception() is not None:", "is implemented similarly to how asynchronous network operations are implemented in Qt. Namely,", "Since the event loop is shared by Qt's and asyncio's events, the concurrent.futures.Future", "\"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop()", "when the client enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal", "self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client attribute self._client = None # put", "service async with aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous client attribute", "loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of the given", "self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done,", "are broadcasted with signals. 
\"\"\" #: Signal emited when the client's state is", "check that the task has been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized", "an error, otherwise None error: Optional[BaseException] = None #: Contains the response of", "the client's state is changed state_changed = pyqtSignal(ClientState) #: Signal emited when the", "the asynchronous Client class from aiocometd to be used in synchronous code if", "@state.setter # type: ignore def state(self, new_state: ClientState) -> None: \"\"\"Set the state", "in Qt. Namely, on a method call the operation is started and the", "is not connected it does nothing. \"\"\" if self.state == ClientState.CONNECTED: # check", "if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the CometD", "subscribe to all the channels for subscription in self._subscriptions: await client.subscribe(subscription) # put", "result attributes of the response depending on # whether it was completed normally", "given *channel* :param channel: Name of the channel :param data: Data to send", "signal is emited or the :obj:`~CometdClient.error` signal on failure. If the client is", "*coro* and set *callback* to be called when the *coro* is finished :param", "Emit signals about errors if the *future's* result is an exception. :param future:", "client implementation This class enables the asynchronous Client class from aiocometd to be", "If the client is already connected then it does nothing. \"\"\" # don't", "client attribute self._client = client # subscribe to all the channels for subscription", "= pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) ->", "# schedule the coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop )", "used in synchronous code if it runs on a quamash event loop. Since", "it does nothing. 
\"\"\" # don't do anything if already connected if self.state", "long as the client is open \"\"\" # connect to the service async", "async with aiocometd.Client(self._url, loop=self._loop) as client: # set the asynchronous client attribute self._client", "implemented similarly to how asynchronous network operations are implemented in Qt. Namely, on", "and asyncio's events, the concurrent.futures.Future can't be awaited, blocking is not allowed. Instead,", "task member self._connect_task = None error = None with suppress(futures.CancelledError): error = future.exception()", "set *callback* to be called when the *coro* is finished :param coro: A", "a quamash event loop. Since the event loop is shared by Qt's and", "task \"\"\" # clear the task member self._connect_task = None error = None", "the result of an asynchronous message sending task :param response: A response associated", "loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback)", "else: response.result = future.result() # notify listeners that a response has been received", "signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the", "self._loop ) async def _connect(self) -> None: \"\"\"Connect to the CometD service and", "into a connected state self.state = ClientState.CONNECTED # listen for incoming messages with", "type: ignore def state(self, new_state: ClientState) -> None: \"\"\"Set the state of the", "auto() #: Disconnected state due to an error ERROR = auto() # pylint:", "# emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous client", "loop. 
Since the event loop is shared by Qt's and asyncio's events, the", "enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def", ":return: The future associated with the *coro* \"\"\" if loop is None: loop", "is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is not", "runs on a quamash event loop. Since the event loop is shared by", "#: Contains the response of the server when finished successfully, #: otherwise None", "emited when the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected = pyqtSignal() #:", "= None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, }", "A future associated with the asynchronous task \"\"\" # set the error or", "channels for subscription in self._subscriptions: await client.subscribe(subscription) # put the client into a", "self._state @state.setter # type: ignore def state(self, new_state: ClientState) -> None: \"\"\"Set the", "loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param url: CometD service url :param", "None error: Optional[BaseException] = None #: Contains the response of the server when", "A callback function called with the future object \\ associated with *coro* :param", "suppress(futures.CancelledError): async for message in client: # emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit,", "partial from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as", "Qt. Namely, on a method call the operation is started and the method", "schedule tasks. 
If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used to get the", "the messages sent by the service as long as the client is open", "ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState, notify=state_changed) def state(self)", "subscription in self._subscriptions: await client.subscribe(subscription) # put the client into a connected state", "state(self, new_state: ClientState) -> None: \"\"\"Set the state of the client to *state*\"\"\"", "error is not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect", "to the given *channel* :param channel: Name of the channel :param data: Data", "# pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type: ignore #", "to get the default event loop. \"\"\" super().__init__() self._url = url self._subscriptions =", "disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client implementation This class enables", "object if finished with an error, otherwise None error: Optional[BaseException] = None #:", "MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an asynchronous message sending", "the response depending on # whether it was completed normally or it exited", "publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the given *channel*", "it runs on a quamash event loop. 
Since the event loop is shared", "anything if new_state != self._state: self._state = new_state # notify listeners that the", "an asynchronous message sending task :param response: A response associated with the *future*", "a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate", "attribute self._client = None # put the client into a disconnected state self.state", "associated with the *future* :param future: A future associated with the asynchronous task", "loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals = {", "import pyqtSignal, pyqtProperty, QObject # type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import", "asynchronous result of a sent CometD message\"\"\" #: Contains the exception object if", "enters the :obj:`~ClientState.CONNECTED` #: state connected = pyqtSignal() #: Signal emited when the", "to all the channels for subscription in self._subscriptions: await client.subscribe(subscription) # put the", "= None error = None with suppress(futures.CancelledError): error = future.exception() if error is", "``None`` then :func:`asyncio.get_event_loop` is used to get the default event loop. 
\"\"\" super().__init__()", "*loop* is ``None`` then :func:`asyncio.get_event_loop` is used to get the default event loop.", "# pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint:", "None) -> None: \"\"\" :param url: CometD service url :param subscriptions: A list", "self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"]", "self._state = new_state # notify listeners that the state changed self.state_changed.emit(self._state) # emit", "put the client into a disconnected state self.state = ClientState.DISCONNECTED def _on_connect_done(self, future:", "def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an asynchronous task", "the message \"\"\" # check that the client has been initialized if self.state", "disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of a sent CometD", "send to the server :return: Return the response associated with the message \"\"\"", "# check that the client has been initialized if self.state != ClientState.CONNECTED: raise", "loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. 
If *loop* is ``None`` then", "None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None:", "# notify listeners that the state changed self.state_changed.emit(self._state) # emit state specific signals", "None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of the client\"\"\" return", "Return the response associated with the message \"\"\" # check that the client", "aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) # pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co],", "to be called when the *coro* is finished :param coro: A coroutine :param", "state_changed = pyqtSignal(ClientState) #: Signal emited when the client enters the :obj:`~ClientState.CONNECTED` #:", "send messages in a non-connected \" \"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized", "= None with suppress(futures.CancelledError): error = future.exception() if error is not None: self.state", "# pylint: disable=invalid-name def run_coro(coro: Awaitable[T_co], callback: Optional[Callable[[\"futures.Future[T_co]\"], Any]] = None, loop: Optional[asyncio.AbstractEventLoop]", "= url self._subscriptions = list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] =", "nothing. 
\"\"\" # don't do anything if already connected if self.state != ClientState.CONNECTED:", "is open \"\"\" # connect to the service async with aiocometd.Client(self._url, loop=self._loop) as", "to which the client should \\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used", "server when finished successfully, #: otherwise None result: Optional[JsonObject] = None #: Emited", "Signal emited when the client's state is changed state_changed = pyqtSignal(ClientState) #: Signal", "do anything if already connected if self.state != ClientState.CONNECTED: # schedule the coroutine", "self.state != ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task = run_coro( self._connect(),", "self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None @pyqtProperty(ClientState,", "event loop. Since the event loop is shared by Qt's and asyncio's events,", "= None @pyqtProperty(ClientState, notify=state_changed) def state(self) -> ClientState: \"\"\"Current state of the client\"\"\"", "future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an asynchronous task Emit signals", "future.exception() else: response.result = future.result() # notify listeners that a response has been", "immediately returns, and then the results or the potential errors during the asynchronous", "emited or the :obj:`~CometdClient.error` signal on failure. If the client is already connected", "_connect_task \" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish", "of the given *coro* and set *callback* to be called when the *coro*", "CometD service and start listening for messages The function returns immediately. 
On success", "an exception if future.exception() is not None: response.error = future.exception() else: response.result =", "asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is not None: future.add_done_callback(callback) return future", "pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes class CometdClient(QObject): # type: ignore \"\"\"Synchronous CometD client", "None: \"\"\"Connect to the CometD service and start listening for messages The function", "list of channels to which the client should \\ subscribe :param loop: Event", "error = None with suppress(futures.CancelledError): error = future.exception() if error is not None:", "__init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None: \"\"\" :param", "MessageResponse: \"\"\"Publish *data* to the given *channel* :param channel: Name of the channel", "\"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an asynchronous message sending task :param", "response depending on # whether it was completed normally or it exited with", "Optional[aiocometd.Client] = None self._state = ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected,", "on a method call the operation is started and the method immediately returns,", "awaited, blocking is not allowed. Instead, this class is implemented similarly to how", "subscriptions: A list of channels to which the client should \\ subscribe :param", "get the default event loop. 
\"\"\" super().__init__() self._url = url self._subscriptions = list(subscriptions)", ":param coro: A coroutine :param callback: A callback function called with the future", "future associated with the *coro* \"\"\" if loop is None: loop = asyncio.get_event_loop()", "started and the method immediately returns, and then the results or the potential", "is not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #:", "signals about errors if the *future's* result is an exception. :param future: A", "the state changed self.state_changed.emit(self._state) # emit state specific signals if new_state in self._state_signals:", "\"\"\"Connect to the CometD service and retreive the messages sent by the service", "ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True) #", "-> None: \"\"\"Connect to the CometD service and retreive the messages sent by", "-> None: \"\"\"Evaluate the result of an asynchronous task Emit signals about errors", "type: ignore # pylint: enable=no-name-in-module from aiocometd_chat_demo.exceptions import InvalidStateError T_co = TypeVar(\"T_co\", covariant=True)", "server message_received = pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] =", "by the service as long as the client is open \"\"\" # connect", "on a quamash event loop. 
Since the event loop is shared by Qt's", "if error is not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None:", "\"state.\") if self._client is None: raise InvalidStateError(\"Uninitialized _client attribute.\") response = MessageResponse() run_coro(self._client.publish(channel,", "*coro* should be scheduled :return: The future associated with the *coro* \"\"\" if", "self.state_changed.emit(self._state) # emit state specific signals if new_state in self._state_signals: self._state_signals[new_state].emit() def connect_(self)", "success the :obj:`~CometdClient.connected` signal is emited or the :obj:`~CometdClient.error` signal on failure. If", "future: A future associated with the asynchronous task \"\"\" # set the error", "type: ignore \"\"\"Synchronous CometD client implementation This class enables the asynchronous Client class", "result of a sent CometD message\"\"\" #: Contains the exception object if finished", "the exception object if finished with an error, otherwise None error: Optional[BaseException] =", ":param subscriptions: A list of channels to which the client should \\ subscribe", "implemented in Qt. Namely, on a method call the operation is started and", ":param future: A future associated with the asynchronous task \"\"\" # clear the", "otherwise None result: Optional[JsonObject] = None #: Emited when the response has been", "<asyncio.BaseEventLoop>` used to schedule tasks. If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used", "from contextlib import suppress import aiocometd from aiocometd.typing import JsonObject # pylint: disable=no-name-in-module", "the :obj:`~CometdClient.error` signal on failure. 
If the client is already connected then it", "\" \"attribute.\") self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data*", "self._connect_task.cancel() def publish(self, channel: str, data: JsonObject) -> MessageResponse: \"\"\"Publish *data* to the", "connected = pyqtSignal() #: Signal emited when the client enters the #: :obj:`~ClientState.DISCONNECTED`", "is emited or the :obj:`~CometdClient.error` signal on failure. If the client is already", "= ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError): async for message in", "!= self._state: self._state = new_state # notify listeners that the state changed self.state_changed.emit(self._state)", "def connect_(self) -> None: \"\"\"Connect to the CometD service and start listening for", "# set the asynchronous client attribute self._client = client # subscribe to all", "not None: self.state = ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the", "loop=self._loop) as client: # set the asynchronous client attribute self._client = client #", "with the asynchronous task \"\"\" # set the error or result attributes of", "Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. 
If *loop* is ``None`` then :func:`asyncio.get_event_loop`", "with suppress(futures.CancelledError): error = future.exception() if error is not None: self.state = ClientState.ERROR", "the error or result attributes of the response depending on # whether it", "self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the CometD service If the client", "from the CometD service If the client is not connected it does nothing.", ":return: Return the response associated with the message \"\"\" # check that the", "Optional[JsonObject] = None #: Emited when the response has been received finished =", "future associated with the asynchronous task \"\"\" # set the error or result", "self.state = ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of", "when finished successfully, #: otherwise None result: Optional[JsonObject] = None #: Emited when", "\\ subscribe :param loop: Event :obj:`loop <asyncio.BaseEventLoop>` used to schedule tasks. If *loop*", "type: ignore \"\"\"The asynchronous result of a sent CometD message\"\"\" #: Contains the", "Contains the exception object if finished with an error, otherwise None error: Optional[BaseException]", "= list(subscriptions) self._loop = loop or asyncio.get_event_loop() self._client: Optional[aiocometd.Client] = None self._state =", "ClientState: \"\"\"Current state of the client\"\"\" return self._state @state.setter # type: ignore def", "= new_state # notify listeners that the state changed self.state_changed.emit(self._state) # emit state", "ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError): async for message in client:", "= ClientState.DISCONNECTED def _on_connect_done(self, future: \"futures.Future[None]\") -> None: \"\"\"Evaluate the result of an", "service If the client is not connected it does nothing. 
\"\"\" if self.state", "retreive the messages sent by the service as long as the client is", "# type: ignore \"\"\"Synchronous CometD client implementation This class enables the asynchronous Client", "pyqtSignal(dict) def __init__(self, url: str, subscriptions: Iterable[str], loop: Optional[asyncio.AbstractEventLoop] = None) -> None:", "message) # clear the asynchronous client attribute self._client = None # put the", "response has been received finished = pyqtSignal() # pylint: enable=too-few-public-methods # pylint: disable=too-many-instance-attributes", ":param callback: A callback function called with the future object \\ associated with", "ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages in a non-connected \" \"state.\") if self._client is", "and set *callback* to be called when the *coro* is finished :param coro:", "then :func:`asyncio.get_event_loop` is used to get the default event loop. \"\"\" super().__init__() self._url", "= MessageResponse() run_coro(self._client.publish(channel, data), partial(self._on_publish_done, response), self._loop) return response @staticmethod def _on_publish_done(response: MessageResponse,", "connected if self.state != ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task =", "Optional, Iterable, TypeVar, Awaitable, Callable, Any import concurrent.futures as futures from contextlib import", "\"futures.Future[T_co]\": \"\"\"Schedule the execution of the given *coro* and set *callback* to be", "or it exited with an exception if future.exception() is not None: response.error =", "suppress(futures.CancelledError): error = future.exception() if error is not None: self.state = ClientState.ERROR self.error.emit(error)", "pyqtSignal() #: Signal emited when the client enters the #: :obj:`~ClientState.DISCONNECTED` state disconnected", "associated with *coro* :param loop: The event loop on which the *coro* should", "loop is shared by Qt's and asyncio's events, the concurrent.futures.Future 
can't be awaited,", "the client has been initialized if self.state != ClientState.CONNECTED: raise InvalidStateError(\"Can't send messages", "when a message has been received from the server message_received = pyqtSignal(dict) def", "is not None: response.error = future.exception() else: response.result = future.result() # notify listeners", "@staticmethod def _on_publish_done(response: MessageResponse, future: \"futures.Future[JsonObject]\") -> None: \"\"\"Evaluate the result of an", "future object \\ associated with *coro* :param loop: The event loop on which", "if callback is not None: future.add_done_callback(callback) return future @unique class ClientState(IntEnum): \"\"\"CometD client", ":param url: CometD service url :param subscriptions: A list of channels to which", "is started and the method immediately returns, and then the results or the", "ClientState.DISCONNECTED self._state_signals = { ClientState.CONNECTED: self.connected, ClientState.DISCONNECTED: self.disconnected, } self._connect_task: Optional[\"futures.Future[None]\"] = None", "then the results or the potential errors during the asynchronous operation are broadcasted", "states\"\"\" #: Connected with the server CONNECTED = auto() #: Disconnected state DISCONNECTED", "state(self) -> ClientState: \"\"\"Current state of the client\"\"\" return self._state @state.setter # type:", "asynchronous network operations are implemented in Qt. 
Namely, on a method call the", "auto() # pylint: disable=too-few-public-methods class MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of", "be used in synchronous code if it runs on a quamash event loop.", "ClientState.CONNECTED: # schedule the coroutine for execution self._connect_task = run_coro( self._connect(), self._on_connect_done, self._loop", "future: A future associated with the asynchronous task \"\"\" # clear the task", "from functools import partial from typing import Optional, Iterable, TypeVar, Awaitable, Callable, Any", "that the state changed self.state_changed.emit(self._state) # emit state specific signals if new_state in", "a sent CometD message\"\"\" #: Contains the exception object if finished with an", "return future @unique class ClientState(IntEnum): \"\"\"CometD client states\"\"\" #: Connected with the server", "a message has been received from the server message_received = pyqtSignal(dict) def __init__(self,", "execution of the given *coro* and set *callback* to be called when the", "# put the client into a connected state self.state = ClientState.CONNECTED # listen", "event loop on which the *coro* should be scheduled :return: The future associated", "loop is None: loop = asyncio.get_event_loop() future = asyncio.run_coroutine_threadsafe(coro, loop) if callback is", "enum import IntEnum, unique, auto import asyncio from functools import partial from typing", "Qt's and asyncio's events, the concurrent.futures.Future can't be awaited, blocking is not allowed.", "client\"\"\" return self._state @state.setter # type: ignore def state(self, new_state: ClientState) -> None:", "= None, loop: Optional[asyncio.AbstractEventLoop] = None,) \\ -> \"futures.Future[T_co]\": \"\"\"Schedule the execution of", "MessageResponse(QObject): # type: ignore \"\"\"The asynchronous result of a sent CometD message\"\"\" #:", "self.state = ClientState.CONNECTED # listen for incoming messages with suppress(futures.CancelledError): async for 
message", "client: # emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the asynchronous", "has been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel()", "the asynchronous client attribute self._client = client # subscribe to all the channels", "\"\"\"Connect to the CometD service and start listening for messages The function returns", "emited when the client enters the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal", "from enum import IntEnum, unique, auto import asyncio from functools import partial from", "in self._state_signals: self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the CometD service and", "def state(self, new_state: ClientState) -> None: \"\"\"Set the state of the client to", "exited with an exception if future.exception() is not None: response.error = future.exception() else:", "attribute self._client = client # subscribe to all the channels for subscription in", "import JsonObject # pylint: disable=no-name-in-module from PyQt5.QtCore import pyqtSignal, pyqtProperty, QObject # type:", "tasks. 
If *loop* is ``None`` then :func:`asyncio.get_event_loop` is used to get the default", ":param loop: The event loop on which the *coro* should be scheduled :return:", "self._client = None # put the client into a disconnected state self.state =", "ClientState.ERROR self.error.emit(error) def disconnect_(self) -> None: \"\"\"Disconnect from the CometD service If the", "clear the task member self._connect_task = None error = None with suppress(futures.CancelledError): error", "then don't do anything if new_state != self._state: self._state = new_state # notify", "== ClientState.CONNECTED: # check that the task has been initialized if self._connect_task is", "to the server :return: Return the response associated with the message \"\"\" #", "self._state_signals[new_state].emit() def connect_(self) -> None: \"\"\"Connect to the CometD service and start listening", "message has been received from the server message_received = pyqtSignal(dict) def __init__(self, url:", "call the operation is started and the method immediately returns, and then the", "None: \"\"\" :param url: CometD service url :param subscriptions: A list of channels", "task \"\"\" # set the error or result attributes of the response depending", "in client: # emit signal about received messages self._loop.call_soon_threadsafe(self.message_received.emit, message) # clear the", "to be used in synchronous code if it runs on a quamash event", "asynchronous operation are broadcasted with signals. 
\"\"\" #: Signal emited when the client's", "been initialized if self._connect_task is None: raise InvalidStateError(\"Uninitialized _connect_task \" \"attribute.\") self._connect_task.cancel() def", "exception object if finished with an error, otherwise None error: Optional[BaseException] = None", "when the *coro* is finished :param coro: A coroutine :param callback: A callback", "finished with an error, otherwise None error: Optional[BaseException] = None #: Contains the", ":func:`asyncio.get_event_loop` is used to get the default event loop. \"\"\" super().__init__() self._url =", "the asynchronous client attribute self._client = None # put the client into a", "depending on # whether it was completed normally or it exited with an", "futures from contextlib import suppress import aiocometd from aiocometd.typing import JsonObject # pylint:", "message sending task :param response: A response associated with the *future* :param future:", "#: Contains the exception object if finished with an error, otherwise None error:", "the :obj:`~ClientState.ERROR` state error = pyqtSignal(Exception) #: Signal emited when a message has", "self._client = client # subscribe to all the channels for subscription in self._subscriptions:", "asynchronous task \"\"\" # clear the task member self._connect_task = None error =", "channel :param data: Data to send to the server :return: Return the response", "asynchronous client attribute self._client = client # subscribe to all the channels for", "the server when finished successfully, #: otherwise None result: Optional[JsonObject] = None #:", "Data to send to the server :return: Return the response associated with the" ]
[ "template for plug in plugs: html = plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes,", "<reponame>Wyvryn/YAuB \"\"\" Main webapp logic All setup config and endpoint definitions are stored", "ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into a select multiple field form.tags.choices", "form = LoginForm(request.form) # Handle logging in if request.method == 'POST': if form.validate_on_submit():", "== \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else:", "import Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login import (LoginManager, current_user, login_required,", "plugins \"\"\" from dateutil import parser import models from flask import Blueprint, flash,", "return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when", "a specific page to load, get articles for that page Otherwise, load page", "__name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form =", "models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form,", "return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id):", "article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def", "edit an article If 
no article id is given we will create a", "creation .. TODO:: page to show loaded plugins \"\"\" from dateutil import parser", "== \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\"))", "form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at", "models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage),", "\"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: # If the password field", "from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm", "time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form", "= [] if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files:", "return redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\" and", "render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article", "== 0: # If the password field has no data, don't change the", "= Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes =", "info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display", "load, get articles for that page 
Otherwise, load page 1\"\"\" if not models.hasSetupRun():", "we're giving a specific page to load, get articles for that page Otherwise,", "= markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run any plugins on our", "tag(id): \"\"\"Loads the main page but only shows articles that have a given", "= ConfigForm(obj=obj) # populate the form with our blog data if request.method ==", "obj = models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if not isNew: #", "render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST'])", "the YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return", "has no data, don't change the user's password obj.password = password else: obj.set_password(form.password.data)", "pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted", "def article(id): \"\"\"Display an article with a given id\"\"\" article = models.getArticle(id) markdown", "login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url)", "= models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput", "== \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: # If the password", "picture is passed, don't crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush()", "from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads 
import IMAGES, UploadNotAllowed,", "else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user", "obj = models.getAuthor(int(current_user.rowid)) # Hold on to this until we validate the fields", "@main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at a given id\"\"\"", "article at a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully", "setup config and endpoint definitions are stored here .. TODO:: allow user creation", "models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog", "\"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else:", "AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from utils import flash_errors, load_plugins markdowner", "== 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\")", "uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If no picture is passed, don't", "we edit the article at the given id\"\"\" isNew = not id if", "form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename", "[] if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try:", "are logged in.\", 'success') redirect_url = request.args.get(\"next\") or 
url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form)", "request, url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads import", "password field has no data, don't change the user's password obj.password = password", "@main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a given id\"\"\" article = models.getArticle(id)", "if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: # If", "render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a given id\"\"\"", "== \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap'])", "models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices = [] if request.method ==", "(LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms", "run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if request.method", "form with our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if", "methods=['GET', 'POST']) def login(): form = LoginForm(request.form) # Handle logging in if request.method", "first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config()", "models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models)", "\"\"\"Initial user setup when accessing YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only", "from markdown2 import Markdown from utils 
import flash_errors, load_plugins markdowner = Markdown() login_manager", "our html before passing it to the template for plug in plugs: html", "methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold", "@login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj() form =", "def admin_delete(id): \"\"\"Deletes an article at a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None,", "settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET',", "'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on", "return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST'])", "# Hold on to this until we validate the fields from the form", "flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a", "this until we validate the fields from the form password = models.getAuthor(int(current_user.rowid)).password form", "if len(form.password.data) == 0: # If the password field has no data, don't", "UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from", "if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return", "id is given we will create a new article, Otherwise we edit the", "article(id): \"\"\"Display an article with a given id\"\"\" article = models.getArticle(id) markdown =", "filename except 
UploadNotAllowed: # If no picture is passed, don't crash pass obj.published", "are stored here .. TODO:: allow user creation .. TODO:: page to show", "article, Otherwise we edit the article at the given id\"\"\" isNew = not", "LoginForm from markdown2 import Markdown from utils import flash_errors, load_plugins markdowner = Markdown()", "we validate the fields from the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj)", "obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else:", "our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted", "header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id))", "All setup config and endpoint definitions are stored here .. 
TODO:: allow user", "= models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown)", "= models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page,", "redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid))", "in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If", "url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout():", "once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\"", "try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If no picture", "else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are", "= models.getAuthor(int(current_user.rowid)) # Hold on to this until we validate the fields from", "= request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout')", "select multiple field form.tags.choices = [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid", "it to the template for plug in plugs: html = plug.run(html) return render_template(", "flask import Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login import 
(LoginManager, current_user,", "from dateutil import parser import models from flask import Blueprint, flash, redirect, render_template,", "UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def", ".. TODO:: page to show loaded plugins \"\"\" from dateutil import parser import", "= UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader", "a select multiple field form.tags.choices = [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter(", "if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return", "that page Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage =", "from flask import Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login import (LoginManager,", "models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\",", "@main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj =", "to load, get articles for that page Otherwise, load page 1\"\"\" if not", "if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url", "form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into a select multiple", "redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def 
admin_delete(id): \"\"\"Deletes", "and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: # If the password field has", "sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing", "Handle logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged", "import parser import models from flask import Blueprint, flash, redirect, render_template, request, url_for", "filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If no picture is", "logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We", "to this until we validate the fields from the form password = models.getAuthor(int(current_user.rowid)).password", "# Handle logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are", "field has no data, don't change the user's password obj.password = password else:", "form.tags.choices = [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag')", "isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html',", "# populate the form with our blog data if request.method == \"POST\" and", "articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when", "Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins()", "once\"\"\" return 
redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\"", "create a new article, Otherwise we edit the article at the given id\"\"\"", "pass if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename except", "routes - / and /<int:page> If we're giving a specific page to load,", "field form.tags.choices = [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id)", "the fields from the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate", "to the template for plug in plugs: html = plug.run(html) return render_template( 'article.html',", "import models from flask import Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login", "= password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form)", "defaults={'page': 1}) def home(page): \"\"\"Home Page We have two routes - / and", "models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form,", "return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change", "render_template, request, url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads", "form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return 
render_template('firstrun_author.html',", "initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB for the first time\"\"\" if", "request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return", "page Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage()", "'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at a given id\"\"\" obj =", "main page but only shows articles that have a given tag\"\"\" return render_template(", "url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads import IMAGES,", "= not id if isNew: obj = models.Article() else: obj = models.getArticle(int(id)) obj.author", "definitions are stored here .. TODO:: allow user creation .. 
TODO:: page to", "models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks", "return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user():", "uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__)", "if isNew: obj = models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form", "our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) ==", "flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet", "def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for the first time\"\"\" if", "hooks into a select multiple field form.tags.choices = [ (a.tag, a.rowid) for a", "shows articles that have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, entries=models.getArticlesWithTag(id,", "= LoginForm(request.form) # Handle logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user)", "\"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate", "don't change the user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted", "firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj", "\"\"\" from dateutil import parser import models from flask import Blueprint, flash, redirect,", "None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", 
methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create or", "logic All setup config and endpoint definitions are stored here .. TODO:: allow", "not isNew: # Bootstrap-TagsInput hooks into a select multiple field form.tags.choices = [", "\"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to this until we", "request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required", "form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form,", "get articles for that page Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return", "the given id\"\"\" isNew = not id if isNew: obj = models.Article() else:", "request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename =", "redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes,", "Otherwise we edit the article at the given id\"\"\" isNew = not id", "footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page", "main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def", "id if isNew: obj = models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid", "else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if not isNew:", "if request.method == \"POST\" 
and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user')", ").order_by('tag') ] else: form.tags.choices = [] if request.method == \"POST\" and form.validate(): form.populate_obj(obj)", "= article.content markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run any", "= plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\")", "to show loaded plugins \"\"\" from dateutil import parser import models from flask", "methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj()", "only shows articles that have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes,", "will create a new article, Otherwise we edit the article at the given", "crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully", "return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET',", "firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for", "request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return", "\"\"\"Loads the main page but only shows articles that have a given tag\"\"\"", "header_includes=header_includes, 
footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog():", "two routes - / and /<int:page> If we're giving a specific page to", "redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\" and form.validate():", "current_user.rowid form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into a select", "data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: #", "IMAGES) plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id):", "return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We have two", "article.content markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run any plugins", "form with our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit()", "editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required", "ConfigForm(obj=obj) # populate the form with our blog data if request.method == \"POST\"", "editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id':", "flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to", "'banner' in request.files: try: filename = 
uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: #", "load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage =", "@main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) # Handle logging in if", "models.getConfigObj() form = ConfigForm(obj=obj) # populate the form with our blog data if", "form = AuthorForm(obj=obj) # populate the form with our blog data if request.method", "models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html',", "redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\",", "return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a given", "is given we will create a new article, Otherwise we edit the article", "new article, Otherwise we edit the article at the given id\"\"\" isNew =", ".. TODO:: allow user creation .. TODO:: page to show loaded plugins \"\"\"", "webapp logic All setup config and endpoint definitions are stored here .. TODO::", "and endpoint definitions are stored here .. TODO:: allow user creation .. 
TODO::", "before passing it to the template for plug in plugs: html = plug.run(html)", "models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models)", "editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def", "for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices = []", "blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0:", "ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from utils import flash_errors, load_plugins", "obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\",", "no picture is passed, don't crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj)", "are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page", "into a select multiple field form.tags.choices = [ (a.tag, a.rowid) for a in", "current_user, login_required, login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import", "] else: form.tags.choices = [] if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if", "import Markdown from utils import flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager()", "setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if request.method ==", "form.validate(): form.populate_obj(obj) 
if len(form.password.data) == 0: # If the password field has no", "plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def", "settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def", "def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj)", "== int(id) ).order_by('tag') ] else: form.tags.choices = [] if request.method == \"POST\" and", "admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) #", "# Bootstrap-TagsInput hooks into a select multiple field form.tags.choices = [ (a.tag, a.rowid)", "= AuthorForm(obj=obj) # populate the form with our blog data if request.method ==", "= models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET',", "# If the password field has no data, don't change the user's password", "UploadNotAllowed: # If no picture is passed, don't crash pass obj.published = parser.parse(obj.published)", "not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template(", "a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article')", "@main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We have two routes -", "flash('You are logged out.') return 
redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home", "import flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES)", "@main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We have two routes - /", "a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, entries=models.getArticlesWithTag(id, 10), models=models, sidebar=True )", "methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create or edit", "parser import models from flask import Blueprint, flash, redirect, render_template, request, url_for from", "html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but", "YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate the form with", "article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\" obj", "parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\"))", "obj.banner = filename except UploadNotAllowed: # If no picture is passed, don't crash", "a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices =", "plugins on our html before passing it to the template for plug in", "endpoint definitions are stored here .. TODO:: allow user creation .. 
TODO:: page", "= markdowner.convert(markdown) # Run any plugins on our html before passing it to", "and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return", "markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run any plugins on", "admin_delete(id): \"\"\"Deletes an article at a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id))", "Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user,", "flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author", "validate the fields from the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) #", "- / and /<int:page> If we're giving a specific page to load, get", "\"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else:", "@main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for the", "flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup", "\"\"\"Deletes an article at a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj)", "@main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but only shows articles that have", "def admin_article(id): \"\"\"Page to create or edit an article If no article id", "but only shows 
articles that have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes,", "'<br />') html = markdowner.convert(markdown) # Run any plugins on our html before", "and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap =", "passing it to the template for plug in plugs: html = plug.run(html) return", "plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return", "models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id,", "config and endpoint definitions are stored here .. TODO:: allow user creation ..", "run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if request.method", "(a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices", "redirect, render_template, request, url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user) from", "nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True,", "multiple field form.tags.choices = [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid ==", "with our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully", "return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return 
render_template( 'home.html', header_includes=header_includes,", "html before passing it to the template for plug in plugs: html =", "= [ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ]", "logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page):", "in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices = [] if request.method", "= models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj)", "models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET',", "def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) #", "user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id):", "articles that have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, entries=models.getArticlesWithTag(id, 10),", "/ and /<int:page> If we're giving a specific page to load, get articles", "isNew = not id if isNew: obj = models.Article() else: obj = models.getArticle(int(id))", "\"\"\"Initial blog setup when accessing the YAuB for the first time\"\"\" if models.hasSetupRun():", "stored here .. TODO:: allow user creation .. 
TODO:: page to show loaded", "forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from utils import", "the article at the given id\"\"\" isNew = not id if isNew: obj", "Markdown from utils import flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos", "page but only shows articles that have a given tag\"\"\" return render_template( 'home.html',", "created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST'])", "flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else:", "return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required", "form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return", "setup when accessing YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup", "Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage", "first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author()", "pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing the", "article at the given id\"\"\" isNew = not id if isNew: obj =", "request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data) == 0: # If the", "an article at a given id\"\"\" obj = 
models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit()", "= models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page),", "redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You", "models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) #", "models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj)", "form.populate_obj(obj) if len(form.password.data) == 0: # If the password field has no data,", "user creation .. 
TODO:: page to show loaded plugins \"\"\" from dateutil import", "int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def", "obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article')", "= parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return", "nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB", "else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html',", "If the password field has no data, don't change the user's password obj.password", "logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm,", "and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form)", "== \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\"))", "specific page to load, get articles for that page Otherwise, load page 1\"\"\"", "articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, 
footer_includes=footer_includes, models=models,", "flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\",", "markdowner = Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes", "plug in plugs: html = plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article,", "request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If no", "models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True)", "the form with our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj)", "if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed:", "form = ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully", "when accessing YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\"", "article If no article id is given we will create a new article,", "article with a given id\"\"\" article = models.getArticle(id) markdown = article.content markdown =", "giving a specific page to load, get articles for that page Otherwise, load", "If we're giving a specific page to load, get articles for that page", "else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page", "filename = 
uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If no picture is", "logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\",", "the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form with", "1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page, articlesPerPage)", "footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial", "except UploadNotAllowed: # If no picture is passed, don't crash pass if 'banner'", "articles for that page Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user'))", "= load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET',", "models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True)", "def home(page): \"\"\"Home Page We have two routes - / and /<int:page> If", "return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def", "models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices = [] if request.method == \"POST\"", "edit the article at the given id\"\"\" isNew = not id if isNew:", "= models.getConfigObj() form = ConfigForm(obj=obj) # 
populate the form with our blog data", "def login(): form = LoginForm(request.form) # Handle logging in if request.method == 'POST':", "render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads", "ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog", "render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def", "\"\"\"Display an article with a given id\"\"\" article = models.getArticle(id) markdown = article.content", "blog setup when accessing the YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only", "return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an", "home(page): \"\"\"Home Page We have two routes - / and /<int:page> If we're", "if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename", "settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate the form with our", "password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form with our blog", "try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If no picture", "\"\"\"Page to create or edit an article If no article id is given", "here .. TODO:: allow user creation .. 
TODO:: page to show loaded plugins", "picture is passed, don't crash pass if 'banner' in request.files: try: filename =", "blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None},", "= LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main =", "sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but only shows articles", "page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage = models.getArticlesPerPage() nextPage = models.nextPage(page,", "data, don't change the user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully", "and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form)", "crash pass if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename", "create or edit an article If no article id is given we will", "return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) # Handle logging", "models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page", "import IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2", "user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return", "with a given 
id\"\"\" article = models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n',", "or edit an article If no article id is given we will create", "filename except UploadNotAllowed: # If no picture is passed, don't crash pass if", "logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def", "form.tags.choices = [] if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in", "@login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form)", "= ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into a select multiple field", "else: flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET',", "flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are logged", "0: # If the password field has no data, don't change the user's", "'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\") or", "UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown", "@login_required def admin_delete(id): \"\"\"Deletes an article at a given id\"\"\" obj = models.getArticle(int(id))", "isNew: # Bootstrap-TagsInput hooks into a select multiple field form.tags.choices = [ (a.tag,", "markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run any plugins on our html", "Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) 
@main.route('/login', methods=['GET', 'POST']) def login(): form", "form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB", "don't crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit()", "password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return", "'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the", "an article with a given id\"\"\" article = models.getArticle(id) markdown = article.content markdown", "flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs,", "user setup when accessing YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run", "html = plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True )", "TODO:: page to show loaded plugins \"\"\" from dateutil import parser import models", "markdowner.convert(markdown) # Run any plugins on our html before passing it to the", "is passed, don't crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags,", "@main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to", "a new article, Otherwise we edit the article at the given id\"\"\" isNew", "a in models.ArticleTag.query.filter( 
models.ArticleTag.articleid == int(id) ).order_by('tag') ] else: form.tags.choices = [] if", "Main webapp logic All setup config and endpoint definitions are stored here ..", "'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def", "flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from", "flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\")", "accessing the YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\"", "models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj)", "isNew: obj = models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form =", "given we will create a new article, Otherwise we edit the article at", "at a given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted", "\"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap", "int(id) ).order_by('tag') ] else: form.tags.choices = [] if request.method == \"POST\" and form.validate():", "@main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create or edit an article", "allow user creation .. 
TODO:: page to show loaded plugins \"\"\" from dateutil", "Page We have two routes - / and /<int:page> If we're giving a", "load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST'])", "Run any plugins on our html before passing it to the template for", "blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog", "utils import flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos',", "redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial", "form = ConfigForm(obj=obj) # populate the form with our blog data if request.method", "/<int:page> If we're giving a specific page to load, get articles for that", "= models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data)", "return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\" and", "'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\" obj = models.getConfigObj() form", "flash, redirect, render_template, request, url_for from flask.ext.login import (LoginManager, current_user, login_required, login_user, logout_user)", "'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB for the first", "from utils import flash_errors, load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos =", "in.\", 'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return 
render_template(\"login.html\",", "login(): form = LoginForm(request.form) # Handle logging in if request.method == 'POST': if", "on to this until we validate the fields from the form password =", "= filename except UploadNotAllowed: # If no picture is passed, don't crash pass", "set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET',", "from the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form", "setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if request.method ==", "1}) def home(page): \"\"\"Home Page We have two routes - / and /<int:page>", "Hold on to this until we validate the fields from the form password", "obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\",", "admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to this until", "@login_required def admin_article(id): \"\"\"Page to create or edit an article If no article", "= uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If no picture is passed,", "import (LoginManager, current_user, login_required, login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from", "form.populate_obj(obj) if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except", "login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm,", "models.getAuthor(int(current_user.rowid)) # Hold on to this until we validate the fields from the", 
"\"\"\"Home Page We have two routes - / and /<int:page> If we're giving", "footer_includes = load_plugins() main = Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login',", "the password field has no data, don't change the user's password obj.password =", "plugs: html = plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True", "UploadNotAllowed: # If no picture is passed, don't crash pass if 'banner' in", "in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success')", "defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create", "article id is given we will create a new article, Otherwise we edit", "at the given id\"\"\" isNew = not id if isNew: obj = models.Article()", "= uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If no picture is passed,", "LoginForm(request.form) # Handle logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You", "an article If no article id is given we will create a new", "return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user()", "YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home'))", "methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create or edit an article If", "models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a given id\"\"\" article =", "loaded plugins \"\"\" from dateutil import parser import models from 
flask import Blueprint,", "if 'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed:", "login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main", "/>') html = markdowner.convert(markdown) # Run any plugins on our html before passing", "render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB", "form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html',", "@main.route('/logout') @login_required def logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\",", "obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form,", "passed, don't crash pass if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner", "len(form.password.data) == 0: # If the password field has no data, don't change", "the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj =", "obj = models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj)", "rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at a", "if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url = request.args.get(\"next\") or 
url_for(\"main.home\")", "'imgcap' in request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: #", "fields from the form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the", "= ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set", "# If no picture is passed, don't crash pass obj.published = parser.parse(obj.published) if", "header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main", "methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at a given id\"\"\" obj", "page to show loaded plugins \"\"\" from dateutil import parser import models from", "[ (a.tag, a.rowid) for a in models.ArticleTag.query.filter( models.ArticleTag.articleid == int(id) ).order_by('tag') ] else:", "not id if isNew: obj = models.Article() else: obj = models.getArticle(int(id)) obj.author =", "models from flask import Blueprint, flash, redirect, render_template, request, url_for from flask.ext.login import", "when accessing the YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup", "models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author():", "markdown = article.content markdown = markdown.replace('\\\\n', '<br />') html = markdowner.convert(markdown) # Run", "form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html',", "form.validate(): 
form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return", "load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) # Handle", "= AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully", "accessing YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return", "render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are logged out.') return", "the template for plug in plugs: html = plug.run(html) return render_template( 'article.html', header_includes=header_includes,", "If no picture is passed, don't crash pass if 'banner' in request.files: try:", "obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form,", "models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models)", "methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for the first", "user') return redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required", "flash('Successfully created user') return 
redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET',", "have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, entries=models.getArticlesWithTag(id, 10), models=models, sidebar=True", "'POST']) def login(): form = LoginForm(request.form) # Handle logging in if request.method ==", "form = AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit()", "def initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB for the first time\"\"\"", "models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an article at a given", "given id\"\"\" isNew = not id if isNew: obj = models.Article() else: obj", "return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id):", "redirect(url_for(\"main.initial_setup_blog\")) else: flash_errors(form) return render_template('firstrun_author.html', form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog():", "flash_errors(form) return render_template('admin_blog.html', form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST'])", "until we validate the fields from the form password = models.getAuthor(int(current_user.rowid)).password form =", "out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We have", "@login_required def 
logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page':", "a given id\"\"\" article = models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n', '<br", "redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models)", "obj.imagecap = filename except UploadNotAllowed: # If no picture is passed, don't crash", "id\"\"\" isNew = not id if isNew: obj = models.Article() else: obj =", "if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings')", "id\"\"\" article = models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n', '<br />') html", "else: form.tags.choices = [] if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if 'imgcap'", "to change YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate the", "'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id): \"\"\"Page to create or edit an", "If no picture is passed, don't crash pass obj.published = parser.parse(obj.published) if isNew:", "'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for the first time\"\"\"", "articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog',", "change the user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user", "@main.route(\"/admin/author\", methods=['GET', 'POST']) 
@login_required def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) #", "markdown2 import Markdown from utils import flash_errors, load_plugins markdowner = Markdown() login_manager =", "admin_article(id): \"\"\"Page to create or edit an article If no article id is", "= models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form with our blog data", "or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def", "@main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB for", "# If no picture is passed, don't crash pass if 'banner' in request.files:", "is passed, don't crash pass if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner'])", "@login_required def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to", "html = markdowner.convert(markdown) # Run any plugins on our html before passing it", "if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form =", "models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST'])", "given id\"\"\" article = models.getArticle(id) markdown = article.content markdown = markdown.replace('\\\\n', '<br />')", "given id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return", "login_required, login_user, logout_user) from flask_uploads import IMAGES, UploadNotAllowed, UploadSet from forms import 
ArticleForm,", "\"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if", "and /<int:page> If we're giving a specific page to load, get articles for", "request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\"))", "models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required", "obj = models.Author() form = AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj)", "methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup when accessing the YAuB for the", "models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj)", "load_plugins markdowner = Markdown() login_manager = LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes,", "data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings')", "form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with a given id\"\"\" article", "models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) # Handle logging in", "page to load, get articles for that page Otherwise, load page 1\"\"\" if", "LoginManager() uploaded_photos = UploadSet('photos', IMAGES) plugs, header_includes, footer_includes = load_plugins() main = Blueprint('main',", "= models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if", "entries=models.getArticlesForPage(page, articlesPerPage), 
sidebar=True, pageNumber=int(page), nextPage=nextPage) @main.route('/firstrun/blog', methods=['GET', 'POST']) def initial_setup_blog(): \"\"\"Initial blog setup", "the user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info')", "article=article, models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but only", "redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1}) def home(page): \"\"\"Home Page We have two routes", "with our blog data if request.method == \"POST\" and form.validate(): form.populate_obj(obj) if len(form.password.data)", "don't crash pass if 'banner' in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner =", "on our html before passing it to the template for plug in plugs:", "\"POST\" and form.validate(): form.populate_obj(obj) models.db.session.commit() flash('Successfully editted blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form)", "return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\" obj =", "the main page but only shows articles that have a given tag\"\"\" return", "form=form, models=models) @main.route(\"/admin/article\", defaults={'id': None}, methods=['GET', 'POST']) @main.route(\"/admin/article/<id>\", methods=['GET', 'POST']) @login_required def admin_article(id):", "if not isNew: # Bootstrap-TagsInput hooks into a select multiple field form.tags.choices =", "no data, don't change the user's password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit()", "if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form =", "\"\"\" Main webapp logic All setup config and endpoint definitions are 
stored here", "from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from utils", "def admin_author(): \"\"\"Updates author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to this", "in request.files: try: filename = uploaded_photos.save(request.files['banner']) obj.banner = filename except UploadNotAllowed: # If", "# Run any plugins on our html before passing it to the template", "= Blueprint('main', __name__) @login_manager.user_loader def load_user(id): return models.getAuthor(int(id)) @main.route('/login', methods=['GET', 'POST']) def login():", "flash('Successfully editted article') return redirect(url_for(\"main.home\")) return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST'])", "Bootstrap-TagsInput hooks into a select multiple field form.tags.choices = [ (a.tag, a.rowid) for", "form password = models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form with our", "redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article", "'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return render_template(\"login.html\", form=form,", "no picture is passed, don't crash pass if 'banner' in request.files: try: filename", "id\"\"\" obj = models.getArticle(int(id)) models.updateTags(None, int(id)) models.db.session.delete(obj) models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\"))", "deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates author info\"\"\"", "flash('Successfully set blog settings') return 
redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author',", "else: flash_errors(form) return render_template('admin_author.html', form=form, models=models) @main.route(\"/article/<id>\") def article(id): \"\"\"Display an article with", "return render_template('admin_article.html', form=form, rowid=id, models=models) @main.route(\"/admin/delete/<id>\", methods=['GET', 'POST']) @login_required def admin_delete(id): \"\"\"Deletes an", "ConfigForm, LoginForm from markdown2 import Markdown from utils import flash_errors, load_plugins markdowner =", "request.files: try: filename = uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If no", "passed, don't crash pass obj.published = parser.parse(obj.published) if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid)", "form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit() flash('Successfully set blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return", "redirect(url_for('main.home')) obj = models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\" and form.validate():", "models.db.session.commit() flash('Successfully deleted article') return redirect(url_for(\"main.home\")) @main.route(\"/admin/author\", methods=['GET', 'POST']) @login_required def admin_author(): \"\"\"Updates", "for plug in plugs: html = plug.run(html) return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html,", "AuthorForm(obj=obj) # populate the form with our blog data if request.method == \"POST\"", "for that page Otherwise, load page 1\"\"\" if not models.hasSetupRun(): return redirect(url_for('main.initial_setup_user')) articlesPerPage", "We have two routes - / and /<int:page> If we're giving a specific", "request.method 
== 'POST': if form.validate_on_submit(): login_user(form.user) flash(\"You are logged in.\", 'success') redirect_url =", "IMAGES, UploadNotAllowed, UploadSet from forms import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import", "form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home'))", "initial_setup_user(): \"\"\"Initial user setup when accessing YAuB for the first time\"\"\" if models.hasSetupRun():", "dateutil import parser import models from flask import Blueprint, flash, redirect, render_template, request,", "for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj", "If no article id is given we will create a new article, Otherwise", "if isNew: models.db.session.add(obj) models.db.session.flush() models.updateTags(obj.tags, obj.rowid) models.db.session.commit() flash('Successfully editted article') return redirect(url_for(\"main.home\")) return", "= current_user.rowid form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into a", "password obj.password = password else: obj.set_password(form.password.data) models.db.session.commit() flash('Successfully editted user info') return redirect(url_for(\"main.home\"))", "except UploadNotAllowed: # If no picture is passed, don't crash pass obj.published =", "time\"\"\" if models.hasSetupRun(): \"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form", "TODO:: allow user creation .. 
TODO:: page to show loaded plugins \"\"\" from", "'POST']) @login_required def admin_article(id): \"\"\"Page to create or edit an article If no", "\"\"\"Only run setup once\"\"\" return redirect(url_for('main.home')) obj = models.Author() form = AuthorForm(obj=obj) if", "models.getAuthor(int(current_user.rowid)).password form = AuthorForm(obj=obj) # populate the form with our blog data if", "models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\")", "models.Config() form = ConfigForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) models.db.session.add(obj) models.db.session.commit()", "render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST']) def initial_setup_user(): \"\"\"Initial user setup when accessing", "populate the form with our blog data if request.method == \"POST\" and form.validate():", "blog settings') return redirect(url_for(\"main.home\")) else: flash_errors(form) return render_template('admin_blog.html', form=form, firstrun=True) @main.route('/firstrun/author', methods=['GET', 'POST'])", "AuthorForm(obj=obj) if request.method == \"POST\" and form.validate(): form.populate_obj(obj) obj.set_password(form.password.data) models.db.session.add(obj) models.db.session.commit() flash('Successfully created", "models=models, sidebar=True ) @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but only shows", "models.nextPage(page, articlesPerPage) return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, models=models, entries=models.getArticlesForPage(page, articlesPerPage), sidebar=True, pageNumber=int(page), nextPage=nextPage)", "models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj) if not", "in plugs: html = plug.run(html) 
return render_template( 'article.html', header_includes=header_includes, footer_includes=footer_includes, html=html, article=article, models=models,", "obj = models.Article() else: obj = models.getArticle(int(id)) obj.author = current_user.rowid form = ArticleForm(obj=obj)", "that have a given tag\"\"\" return render_template( 'home.html', header_includes=header_includes, footer_includes=footer_includes, entries=models.getArticlesWithTag(id, 10), models=models,", "no article id is given we will create a new article, Otherwise we", "def logout(): logout_user() flash('You are logged out.') return redirect(url_for('main.home')) @main.route(\"/<int:page>\") @main.route(\"/\", defaults={'page': 1})", "to create or edit an article If no article id is given we", ") @main.route(\"/tag/<id>\") def tag(id): \"\"\"Loads the main page but only shows articles that", "logged in.\", 'success') redirect_url = request.args.get(\"next\") or url_for(\"main.home\") return redirect(redirect_url) else: flash_errors(form) return", "uploaded_photos.save(request.files['imgcap']) obj.imagecap = filename except UploadNotAllowed: # If no picture is passed, don't", "return render_template(\"login.html\", form=form, models=models) @main.route('/logout') @login_required def logout(): logout_user() flash('You are logged out.')", "setup when accessing the YAuB for the first time\"\"\" if models.hasSetupRun(): \"\"\"Only run", "any plugins on our html before passing it to the template for plug", "author info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to this until we validate", "show loaded plugins \"\"\" from dateutil import parser import models from flask import", "have two routes - / and /<int:page> If we're giving a specific page", "form=form, firstrun=True) @main.route(\"/admin/settings\", methods=['GET', 'POST']) @login_required def admin_blog(): \"\"\"Page to change YAuB settings\"\"\"", "article = models.getArticle(id) markdown = article.content markdown = 
markdown.replace('\\\\n', '<br />') html =", "def tag(id): \"\"\"Loads the main page but only shows articles that have a", "change YAuB settings\"\"\" obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate the form", "obj = models.getConfigObj() form = ConfigForm(obj=obj) # populate the form with our blog", "info\"\"\" obj = models.getAuthor(int(current_user.rowid)) # Hold on to this until we validate the", "import ArticleForm, AuthorForm, ConfigForm, LoginForm from markdown2 import Markdown from utils import flash_errors,", "obj.author = current_user.rowid form = ArticleForm(obj=obj) if not isNew: # Bootstrap-TagsInput hooks into", "we will create a new article, Otherwise we edit the article at the" ]
[ "= x - 1 time.sleep(1) #waiting for clearing of serial buffer # display", "TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "not capture: print \"Error opening capture device\" sys.exit(1) faceBeep = 0 #Used for", "following disclaimer in the documentation and/or other materials provided with the distribution. *", "promote products derived from this software without specific prior written permission. * Source", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED", "capture: print \"Error opening capture device\" sys.exit(1) faceBeep = 0 #Used for beeping", "cv #importing opencv for face detection import time #for sleep function import serial", "device is OK if not capture: print \"Error opening capture device\" sys.exit(1) faceBeep", "parameters are tweaked to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking", "materials provided with the distribution. * Neither the name of the copyright holders", "AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "face is found multipleFaces = 0 #for indicating that multiple faces multipleFacesInit =", "0 #for indicating that multiple faces multipleFacesInit = 0 forward = 0 left", "face detection. The code also handles false faces that may creep in. 'S'", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "want second capture device(USB webcam), use i for the capure device /dev/videoi capture", "faceNotDetected = 0 multipleFacesInit = 1 #Realign to initial position of multiple face", "that the following conditions are met: * Redistributions of source code must retain", "DAMAGE. Software released under Creative Commence cc by-nc-sa licence. 
For legal information refer", "'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if", "webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image # create capture device", "height to 480 #If you want to capture at native resolution of the", "OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS", "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "> 0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY", "time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected", "and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit = 1 #Realign to initial", "check if capture device is OK if not capture: print \"Error opening capture", "under Creative Commence cc by-nc-sa licence. For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\"", "use permission form the author needs to be taken. 
THIS SOFTWARE IS PROVIDED", "multipleFacesInit = 0 forward = 0 left = 0 right = 0 while", "if len(faces) > 0: firstFaceDetected = 1 #multiple faces if len(faces) > 1:", "function import serial #importing pyserial for serial communication count = 0 # configure", "= 0 while 1: # do forever # capture the current frame frame", "face detection import time #for sleep function import serial #importing pyserial for serial", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left =", "without modification, are permitted provided that the following conditions are met: * Redistributions", "ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0 else: #scouting if", "- 1 time.sleep(1) #waiting for clearing of serial buffer #Realign to initial orientation", "initial position of multiple face detection. while forward > 0: ser.write('R'+'\\r\\n') forward =", "----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing system for handling signals for", "#handling false detection if falseDetection > 10: falseDetection = 0 firstFaceDetected = 0", "of the web cam don't set obove width and height parameters, the processing", "of multiple face detection. if left > right: x = left - right", "cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If you want to capture at", "+ float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep =", "baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size of the", "cv.WaitKey(10) if k == 0x1b: # ESC print 'ESC pressed. Exiting ...' break", "tweaking your parameters. 
print faces #printing the rectangles circumscribing the face in the", "above copyright notice, this list of conditions and the following disclaimer. * Redistributions", "ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of serial buffer", "All rights reserved. Redistribution and use in source and binary forms, with or", "ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0 else: #scouting if firstFaceDetected ==", "capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT,", "the documentation and/or other materials provided with the distribution. * Neither the name", "serial #importing pyserial for serial communication count = 0 # configure the serial", "#creating autosizable windows for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows", "= 0 #For false detection faceNotDetected = 0 #used for checking if no", "----> Move Left 'T' ----> Move Right 'Q' ----> Move Forward 'R' ---->", "from this software without specific prior written permission. 
* Source code can be", "SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the", "detection faces = detect(frame) if len(faces) > 0: firstFaceDetected = 1 #multiple faces", "cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank", "ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected =", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA,", "if capture device is OK if not capture: print \"Error opening capture device\"", "device = 1 # assume we want second capture device(USB webcam), use i", "firstFaceDetected = 0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else:", "The code also handles false faces that may creep in. 'S' ----> Move", "= 0 while x > 0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1)", "i.e. 
handling webcam with arbitrary resolution image_size = cv.GetSize(image) # create grayscale version", "#creating a blank image with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying", "communication count = 0 # configure the serial connections (the parameters differs on", "device(USB webcam), use i for the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture,", "one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing system for", "if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit = 0 #algining itself to", "faceBeep = 0 #Used for beeping control firstFaceDetected = 0 #Used for face", "the device you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where", "= 1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep = 0 #Single face", "resolutions # check if capture device is OK if not capture: print \"Error", "= 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep =", "if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0: falseDetection", "left - right left = 0 right = 0 while x > 0:", "we want second capture device(USB webcam), use i for the capure device /dev/videoi", "port where the serial communication usb is present. 
baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS )", "----> Move Right 'Q' ----> Move Forward 'R' ----> Move Backward 'B' ---->", "Left 'T' ----> Move Right 'Q' ----> Move Forward 'R' ----> Move Backward", "autosizable windows for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for", "detection faceNotDetected = 0 #used for checking if no face is found multipleFaces", "#retracing to position and orientation of multiple face detection faceNotDetected = faceNotDetected +", "to initial orientation of multiple face detection. if left > right: x =", "print \"Press ESC to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable", "0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting for clearing of serial", "#waiting for clearing of serial buffer #Realign to initial orientation of multiple face", "if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left + 1 else:", "0 #Single face detected else: if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit", "must reproduce the above copyright notice, this list of conditions and the following", "name of the copyright holders nor the names of contributors may be used", "> 0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting for clearing of", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "#for indicating that multiple faces multipleFacesInit = 0 forward = 0 left =", "be taken. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "= 0 #Used for beeping control firstFaceDetected = 0 #Used for face detection", "\"\"\"************************************************************************************************** Platform: Python 2.x and 2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME>", "image cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10) if k == 0x1b:", "of multiple face detection. while forward > 0: ser.write('R'+'\\r\\n') forward = forward -", "faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left + 1 else: if", "(0, 255, 0), 3, 8, 0) return faces if __name__ == \"__main__\": print", "> 0: firstFaceDetected = 1 #multiple faces if len(faces) > 1: multipleFacesInit =", "= 0 left = 0 while x > 0: ser.write('S'+'\\r\\n') x = x", "documentation and/or other materials provided with the distribution. * Neither the name of", "10: falseDetection = 0 firstFaceDetected = 0 #retracing to position and orientation of", "time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0 else: #scouting if firstFaceDetected", "Commence cc by-nc-sa licence. For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This", "detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade,", "multipleFaces = 0 #for indicating that multiple faces multipleFacesInit = 0 forward =", "left right = 0 left = 0 while x > 0: ser.write('S'+'\\r\\n') x", "with the distribution. * Neither the name of the copyright holders nor the", "written permission. * Source code can be used for academic purpose. 
For commercial", "falseDetection + 1 #handling false detection if falseDetection > 10: falseDetection = 0", "refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for face detection.", "1: faceNotDetected = 0 multipleFacesInit = 1 #Realign to initial position of multiple", "if falseDetection > 10: falseDetection = 0 firstFaceDetected = 0 #retracing to position", "for handling signals for exit import cv #importing opencv for face detection import", "video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. print faces #printing", "- 1 time.sleep(1) #waiting for clearing of serial buffer else: x = right", "This code does image processing for face detection. The code also handles false", "#importing pyserial for serial communication count = 0 # configure the serial connections", "if multipleFaces == 0: firstFaceDetected = 0 else: #scouting if firstFaceDetected == 0:", "+ float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left + 1 else: if faces[0][0][0]", "NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These parameters are tweaked to", "ser.write('S'+'\\r\\n') left = left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380:", "the copyright holders nor the names of contributors may be used to endorse", "names of contributors may be used to endorse or promote products derived from", "= faceNotDetected + 1 if faceNotDetected > 10 and multipleFaces == 1: faceNotDetected", "1.<NAME> 2.<NAME> **************************************************************************************************/ 
/******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay. -*- c", "in source and binary forms, with or without modification, are permitted provided that", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS", "+ i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3, 8, 0) return faces", "right: x = left - right left = 0 right = 0 while", "0 multipleFacesInit = 0 #algining itself to face the enemy if faces[0][0][0] +", "ERTS Lab, IIT Bombay. -*- c -*- All rights reserved. Redistribution and use", "to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If you want", "for beeping control firstFaceDetected = 0 #Used for face detection falseDetection = 0", "webcam with arbitrary resolution image_size = cv.GetSize(image) # create grayscale version grayscale =", "#loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting", "device\" sys.exit(1) faceBeep = 0 #Used for beeping control firstFaceDetected = 0 #Used", "OK if not capture: print \"Error opening capture device\" sys.exit(1) faceBeep = 0", "false faces that may creep in. 'S' ----> Move Left 'T' ----> Move", "generic image resolution, i.e. 
handling webcam with arbitrary resolution image_size = cv.GetSize(image) #", "0 while x > 0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting", "image with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and", "Beep for one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing", "and white version of the image into the blank image # create storage", "GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER", ") ser.open() ser.isOpen() def detect(image): #Getting size of the image for handling generic", "the above copyright notice, this list of conditions and the following disclaimer in", "CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "modification, are permitted provided that the following conditions are met: * Redistributions of", "= detect(frame) if len(faces) > 0: firstFaceDetected = 1 #multiple faces if len(faces)", "capture device\" sys.exit(1) faceBeep = 0 #Used for beeping control firstFaceDetected = 0", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "# capture the current frame frame = cv.QueryFrame(capture) if frame is None: continue", "copyright holders nor the names of contributors may be used to endorse or", "faces multipleFacesInit = 0 forward = 0 left = 0 right = 0", "to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. 
print faces #printing the rectangles circumscribing the", "Python 2.x and 2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /********************************************************************************", "Left ********************************************************************************/\"\"\" import sys #importing system for handling signals for exit import cv", "USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "derived from this software without specific prior written permission. * Source code can", "Move Backward 'B' ----> Beep for one second 'Z' ----> Move Fast Left", "image cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade", "is found multipleFaces = 0 #for indicating that multiple faces multipleFacesInit = 0", "web cam don't set obove width and height parameters, the processing speed will", "HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "opencv for face detection import time #for sleep function import serial #importing pyserial", "and the following disclaimer in the documentation and/or other materials provided with the", "260: ser.write('S'+'\\r\\n') left = left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) >", "> 10 and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit = 1 #Realign", "while forward > 0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting for", "> 3: faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1)", "grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank image with the", "orientation of multiple face detection. 
if left > right: x = left -", "falseDetection > 10: falseDetection = 0 firstFaceDetected = 0 #retracing to position and", "code does image processing for face detection. The code also handles false faces", "1 #alarming for detected enemy if faceBeep > 3: faceBeep = 0 print", "beeping control firstFaceDetected = 0 #Used for face detection falseDetection = 0 #For", "1 #handling false detection if falseDetection > 10: falseDetection = 0 firstFaceDetected =", "the following disclaimer in the documentation and/or other materials provided with the distribution.", "notice, this list of conditions and the following disclaimer in the documentation and/or", "nor the names of contributors may be used to endorse or promote products", "left = 0 while x > 0: ser.write('S'+'\\r\\n') x = x - 1", "Redistributions of source code must retain the above copyright notice, this list of", "# check if capture device is OK if not capture: print \"Error opening", "else: faceBeep = faceBeep + 1 #alarming for detected enemy if faceBeep >", "with or without modification, are permitted provided that the following conditions are met:", "resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white version of the image", "ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0: falseDetection = falseDetection + 1", "detection. The code also handles false faces that may creep in. 'S' ---->", "resolution of the web cam don't set obove width and height parameters, the", "1 faceBeep = 0 #Single face detected else: if multipleFacesInit == 1: multipleFaces", "Platform: Python 2.x and 2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/", "refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. 
print faces #printing the rectangles circumscribing", "the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width", "0: falseDetection = falseDetection + 1 #handling false detection if falseDetection > 10:", "image resolution, i.e. handling webcam with arbitrary resolution image_size = cv.GetSize(image) # create", "0 # configure the serial connections (the parameters differs on the device you", "image # create capture device device = 1 # assume we want second", "cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If you want to capture", "(c) 2010, ERTS Lab, IIT Bombay. -*- c -*- All rights reserved. Redistribution", "reproduce the above copyright notice, this list of conditions and the following disclaimer", "# face detection faces = detect(frame) if len(faces) > 0: firstFaceDetected = 1", "********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for face detection. The code also", "i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0,", "= right - left right = 0 left = 0 while x >", "events k = cv.WaitKey(10) if k == 0x1b: # ESC print 'ESC pressed.", "multipleFacesInit = 1 #Realign to initial position of multiple face detection. 
while forward", "= right + 1 else: faceBeep = faceBeep + 1 #alarming for detected", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,", "the name of the copyright holders nor the names of contributors may be", "0 firstFaceDetected = 0 #retracing to position and orientation of multiple face detection", "if not capture: print \"Error opening capture device\" sys.exit(1) faceBeep = 0 #Used", "\"batu\\n\" else: if multipleFaces == 0: falseDetection = falseDetection + 1 #handling false", "for handling generic image resolution, i.e. handling webcam with arbitrary resolution image_size =", "Redistribution and use in source and binary forms, with or without modification, are", "needs to be taken. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image # create capture device device =", "#Used for beeping control firstFaceDetected = 0 #Used for face detection falseDetection =", "#Realign to initial position of multiple face detection. while forward > 0: ser.write('R'+'\\r\\n')", "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3,", "for processed image # create capture device device = 1 # assume we", "faceBeep = 0 #Single face detected else: if multipleFacesInit == 1: multipleFaces =", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR", "may creep in. 
'S' ----> Move Left 'T' ----> Move Right 'Q' ---->", "set obove width and height parameters, the processing speed will be slower for", "multiple face detection faceNotDetected = faceNotDetected + 1 if faceNotDetected > 10 and", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "image #These parameters are tweaked to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html", "(i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3, 8, 0) return", "Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT", "binary forms, with or without modification, are permitted provided that the following conditions", "# create storage storage = cv.CreateMemStorage(0) #creating required storage for face detection #", "capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
Software released under", "face detection faces = detect(frame) if len(faces) > 0: firstFaceDetected = 1 #multiple", "# detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale,", "'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing system for handling signals", "in the image #drawing rectangles around the faces in the image if faces:", "1 if faceNotDetected > 10 and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit", "differs on the device you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The", "checking if no face is found multipleFaces = 0 #for indicating that multiple", "are met: * Redistributions of source code must retain the above copyright notice,", "IIT Bombay. -*- c -*- All rights reserved. Redistribution and use in source", "count = 0 # configure the serial connections (the parameters differs on the", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "the face in the image #drawing rectangles around the faces in the image", "#Used for face detection falseDetection = 0 #For false detection faceNotDetected = 0", "= 1 #Realign to initial position of multiple face detection. 
while forward >", "for clearing of serial buffer # display webcam image cv.ShowImage('Raw', frame) # handle", "#setting capture height to 480 #If you want to capture at native resolution", "capture the current frame frame = cv.QueryFrame(capture) if frame is None: continue #", "Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS", "= cv.CreateImage(image_size, 8, 1) #creating a blank image with the given image's resolution", "= cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2,", "system for handling signals for exit import cv #importing opencv for face detection", "opening capture device\" sys.exit(1) faceBeep = 0 #Used for beeping control firstFaceDetected =", "\"__main__\": print \"Press ESC to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating", "faceNotDetected + 1 if faceNotDetected > 10 and multipleFaces == 1: faceNotDetected =", "1 time.sleep(1) #waiting for clearing of serial buffer else: x = right -", "white version of the image into the blank image # create storage storage", "information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for face", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left + 1", "faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1)", "left > right: 
x = left - right left = 0 right =", "faceNotDetected > 10 and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit = 1", "left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right =", "'T' ----> Move Right 'Q' ----> Move Forward 'R' ----> Move Backward 'B'", "slower for larger image resolutions # check if capture device is OK if", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES", "your parameters. print faces #printing the rectangles circumscribing the face in the image", "if k == 0x1b: # ESC print 'ESC pressed. Exiting ...' break #Exiting", "frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image # create", "without specific prior written permission. * Source code can be used for academic", "bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size of the image for handling", "cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white version of the image into", "for serial communication count = 0 # configure the serial connections (the parameters", "ser.open() ser.isOpen() def detect(image): #Getting size of the image for handling generic image", "\"Press ESC to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows", "conditions and the following disclaimer in the documentation and/or other materials provided with", "SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "in the documentation and/or other materials provided with the distribution. 
* Neither the", "= cv.CreateMemStorage(0) #creating required storage for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale)", "the image if faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] +", "(i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3, 8,", "# create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from webcam", "at native resolution of the web cam don't set obove width and height", "will be slower for larger image resolutions # check if capture device is", "multipleFacesInit = 0 #algining itself to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2)", "Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay.", "the faces in the image if faces: for i in faces: cv.Rectangle(image, (i[0][0],", "serial connections (the parameters differs on the device you are connecting to) ser", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Software released under Creative", "false detection if falseDetection > 10: falseDetection = 0 firstFaceDetected = 0 #retracing", "if len(faces) > 1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward =", "with arbitrary resolution image_size = cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size,", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "= 0 # configure the serial connections (the parameters differs on the device", "0), 3, 8, 0) return faces if __name__ == \"__main__\": print \"Press ESC", "image for handling generic image resolution, i.e. 
handling webcam with arbitrary resolution image_size", "list of conditions and the following disclaimer in the documentation and/or other materials", "+ 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right", "faces if __name__ == \"__main__\": print \"Press ESC to exit ...\" # create", "windows for processed image # create capture device device = 1 # assume", "#creating autosizable windows for processed image # create capture device device = 1", "= 0 while x > 0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1)", "* Redistributions of source code must retain the above copyright notice, this list", "a blank image with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the", "OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "0: firstFaceDetected = 0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\"", "1: multipleFaces = 0 multipleFacesInit = 0 #algining itself to face the enemy", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN", "face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left", "provided with the distribution. * Neither the name of the copyright holders nor", "pyserial for serial communication count = 0 # configure the serial connections (the", "other materials provided with the distribution. * Neither the name of the copyright", "false detection faceNotDetected = 0 #used for checking if no face is found", "create grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank image with", "#waiting for clearing of serial buffer # display webcam image cv.ShowImage('Raw', frame) #", "author needs to be taken. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,", "serial communication count = 0 # configure the serial connections (the parameters differs", "= cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image", "x > 0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing", "EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces", "present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size of", "device device = 1 # assume we want second capture device(USB webcam), use", "purpose. For commercial use permission form the author needs to be taken. THIS", "detection. if left > right: x = left - right left = 0", "falseDetection = falseDetection + 1 #handling false detection if falseDetection > 10: falseDetection", "webcam image cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10) if k ==", "Copyright (c) 2010, ERTS Lab, IIT Bombay. -*- c -*- All rights reserved.", "be used to endorse or promote products derived from this software without specific", "for detected enemy if faceBeep > 3: faceBeep = 0 print 'Beeping for", "frame is None: continue # mirror #cv.Flip(frame, None, 1) # face detection faces", "#Realign to initial orientation of multiple face detection. 
if left > right: x", "serial buffer # display webcam image cv.ShowImage('Raw', frame) # handle events k =", "are permitted provided that the following conditions are met: * Redistributions of source", "for one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing system", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF", "licence. For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image", "faceBeep > 3: faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n')", "the image for handling generic image resolution, i.e. handling webcam with arbitrary resolution", "cv.CreateMemStorage(0) #creating required storage for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) #", "device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640", "print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n')", "by-nc-sa licence. 
For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does", "# handle events k = cv.WaitKey(10) if k == 0x1b: # ESC print", "don't set obove width and height parameters, the processing speed will be slower", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY", "THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "********************************************************************************/\"\"\" import sys #importing system for handling signals for exit import cv #importing", "required storage for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Software released", "of conditions and the following disclaimer. * Redistributions in binary form must reproduce", "else: x = right - left right = 0 left = 0 while", "ser = serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication usb is present.", "to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "= left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right", "serial buffer else: x = right - left right = 0 left =", "width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If you", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "taken. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "0 #For false detection faceNotDetected = 0 #used for checking if no face", "Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay. -*-", "Lab, IIT Bombay. -*- c -*- All rights reserved. Redistribution and use in", "disclaimer in the documentation and/or other materials provided with the distribution. * Neither", "configure the serial connections (the parameters differs on the device you are connecting", "of the image for handling generic image resolution, i.e. handling webcam with arbitrary", "the image into the blank image # create storage storage = cv.CreateMemStorage(0) #creating", "capture device device = 1 # assume we want second capture device(USB webcam),", "storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These parameters are", "also handles false faces that may creep in. 'S' ----> Move Left 'T'", "is None: continue # mirror #cv.Flip(frame, None, 1) # face detection faces =", "are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication", "8, 1) #creating a blank image with the given image's resolution cv.CvtColor(image, grayscale,", "width and height parameters, the processing speed will be slower for larger image", "the serial communication usb is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen()", "ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE", "given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white version of", "NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "must retain the above copyright notice, this list of conditions and the following", "native resolution of the web cam don't set obove width and height parameters,", "#The port where the serial communication usb is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS", "STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "i[0][3]), (0, 255, 0), 3, 8, 0) return faces if __name__ == \"__main__\":", "height parameters, the processing speed will be slower for larger image resolutions #", "while x > 0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for", "to endorse or promote products derived from this software without specific prior written", "SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION)", "released under Creative Commence cc by-nc-sa licence. For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode", "are tweaked to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your", "use i for the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640)", "disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this", "----> Move Backward 'B' ----> Beep for one second 'Z' ----> Move Fast", "right left = 0 right = 0 while x > 0: ser.write('T'+'\\r\\n') x", "ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep = 0 #Single face detected else:", "1) #creating a blank image with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY)", "forms, with or without modification, are permitted provided that the following conditions are", "capture device is OK if not capture: print \"Error opening capture device\" sys.exit(1)", "exit import cv #importing opencv for face detection import time #for sleep function", "faces if len(faces) > 1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward", "multiple face detection. while forward > 0: ser.write('R'+'\\r\\n') forward = forward - 1", "parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size of the image", "handling signals for exit import cv #importing opencv for face detection import time", "multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep = 0 #Single", "Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in", "* Neither the name of the copyright holders nor the names of contributors", "float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left + 1 else: if faces[0][0][0] +", "#importing opencv for face detection import time #for sleep function import serial #importing", "0: ser.write('T'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of serial", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "position of multiple face detection. while forward > 0: ser.write('R'+'\\r\\n') forward = forward", "where the serial communication usb is present. 
baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open()", "face detection faceNotDetected = faceNotDetected + 1 if faceNotDetected > 10 and multipleFaces", "histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale) # detect objects cascade", "2010, ERTS Lab, IIT Bombay. -*- c -*- All rights reserved. Redistribution and", "is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size", "# display webcam image cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10) if", "firstFaceDetected = 0 #retracing to position and orientation of multiple face detection faceNotDetected", "import serial #importing pyserial for serial communication count = 0 # configure the", "OF SUCH DAMAGE. Software released under Creative Commence cc by-nc-sa licence. For legal", "face detection. while forward > 0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1)", "= cv.QueryFrame(capture) if frame is None: continue # mirror #cv.Flip(frame, None, 1) #", "enemy if faceBeep > 3: faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n')", "1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep = 0 #Single face detected", "- 1 time.sleep(1) #waiting for clearing of serial buffer # display webcam image", "'Q' ----> Move Forward 'R' ----> Move Backward 'B' ----> Beep for one", "and 2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c)", "cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale) # detect objects cascade =", "windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from 
webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE)", "OR SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "faces = detect(frame) if len(faces) > 0: firstFaceDetected = 1 #multiple faces if", "import time #for sleep function import serial #importing pyserial for serial communication count", "this list of conditions and the following disclaimer. * Redistributions in binary form", "to be taken. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. print faces #printing the rectangles circumscribing the face", "the web cam don't set obove width and height parameters, the processing speed", "DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "faceNotDetected = faceNotDetected + 1 if faceNotDetected > 10 and multipleFaces == 1:", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of serial buffer", "cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2,", "rectangles circumscribing the face in the image #drawing rectangles around the faces in", "if multipleFaces == 0: falseDetection = falseDetection + 1 #handling false detection if", "'S' ----> Move Left 'T' ----> Move Right 'Q' ----> Move Forward 'R'", "serial communication usb is present. 
baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def", "captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image #", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR", "1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These parameters are tweaked", "of source code must retain the above copyright notice, this list of conditions", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "0 right = 0 while 1: # do forever # capture the current", "OF THE POSSIBILITY OF SUCH DAMAGE. Software released under Creative Commence cc by-nc-sa", "serial buffer #Realign to initial orientation of multiple face detection. if left >", "#printing the rectangles circumscribing the face in the image #drawing rectangles around the", "cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting", "to initial position of multiple face detection. while forward > 0: ser.write('R'+'\\r\\n') forward", "#These parameters are tweaked to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for", "+ 1 faceBeep = 0 #Single face detected else: if multipleFacesInit == 1:", "SUCH DAMAGE. Software released under Creative Commence cc by-nc-sa licence. For legal information", "0 #used for checking if no face is found multipleFaces = 0 #for", "storage for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image", "#used for checking if no face is found multipleFaces = 0 #for indicating", "Source code can be used for academic purpose. 
For commercial use permission form", "OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "= 0 right = 0 while x > 0: ser.write('T'+'\\r\\n') x = x", "of serial buffer #Realign to initial orientation of multiple face detection. if left", "x = right - left right = 0 left = 0 while x", "detected else: if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit = 0 #algining", "Move Right 'Q' ----> Move Forward 'R' ----> Move Backward 'B' ----> Beep", "- left right = 0 left = 0 while x > 0: ser.write('S'+'\\r\\n')", "#algining itself to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n')", "len(faces) > 1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward", "# assume we want second capture device(USB webcam), use i for the capure", "----> Beep for one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys", "sleep function import serial #importing pyserial for serial communication count = 0 #", "code also handles false faces that may creep in. 'S' ----> Move Left", "ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF", "forward = forward - 1 time.sleep(1) #waiting for clearing of serial buffer #Realign", "the image #These parameters are tweaked to RGB video captures, please refer to", "== 0x1b: # ESC print 'ESC pressed. Exiting ...' 
break #Exiting the program", "+ 1 if faceNotDetected > 10 and multipleFaces == 1: faceNotDetected = 0", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND", "for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed',", "Backward 'B' ----> Beep for one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\"", "import sys #importing system for handling signals for exit import cv #importing opencv", "firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0: falseDetection =", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "parameters differs on the device you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0',", "used for academic purpose. For commercial use permission form the author needs to", "#For false detection faceNotDetected = 0 #used for checking if no face is", "if __name__ == \"__main__\": print \"Press ESC to exit ...\" # create windows", "academic purpose. For commercial use permission form the author needs to be taken.", "to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. 
print", "OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", "resolution image_size = cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size, 8, 1)", "0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces", "multipleFaces == 0: firstFaceDetected = 0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n')", "faces in the image if faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]),", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10) if k == 0x1b: #", "COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "# mirror #cv.Flip(frame, None, 1) # face detection faces = detect(frame) if len(faces)", "or without modification, are permitted provided that the following conditions are met: *", "software without specific prior written permission. * Source code can be used for", "ESC to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for", "forever # capture the current frame frame = cv.QueryFrame(capture) if frame is None:", "10 and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit = 1 #Realign to", "form the author needs to be taken. 
THIS SOFTWARE IS PROVIDED BY THE", "Fast Left ********************************************************************************/\"\"\" import sys #importing system for handling signals for exit import", "0 forward = 0 left = 0 right = 0 while 1: #", "firstFaceDetected = 1 #multiple faces if len(faces) > 1: multipleFacesInit = 0 multipleFaces", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "multipleFaces = 0 multipleFacesInit = 0 #algining itself to face the enemy if", "0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep = 0", "of the copyright holders nor the names of contributors may be used to", "copyright notice, this list of conditions and the following disclaimer. * Redistributions in", "may be used to endorse or promote products derived from this software without", "the names of contributors may be used to endorse or promote products derived", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep = faceBeep + 1 #alarming", "for face detection falseDetection = 0 #For false detection faceNotDetected = 0 #used", "1 time.sleep(1) #waiting for clearing of serial buffer # display webcam image cv.ShowImage('Raw',", "for clearing of serial buffer #Realign to initial orientation of multiple face detection.", "right - left right = 0 left = 0 while x > 0:", "prior written permission. * Source code can be used for academic purpose. For", "Move Left 'T' ----> Move Right 'Q' ----> Move Forward 'R' ----> Move", "time.sleep(1) #waiting for clearing of serial buffer #Realign to initial orientation of multiple", "conditions are met: * Redistributions of source code must retain the above copyright", "and/or other materials provided with the distribution. 
* Neither the name of the", "\"\"\"/******************************************************************************* This code does image processing for face detection. The code also handles", "blank image # create storage storage = cv.CreateMemStorage(0) #creating required storage for face", "commercial use permission form the author needs to be taken. THIS SOFTWARE IS", "clearing of serial buffer #Realign to initial orientation of multiple face detection. if", "while x > 0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for", "THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE", "OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "> 10: falseDetection = 0 firstFaceDetected = 0 #retracing to position and orientation", "grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces =", "if frame is None: continue # mirror #cv.Flip(frame, None, 1) # face detection", "on the device you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port", "else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right + 1", "#copying the black and white version of the image into the blank image", "current frame frame = cv.QueryFrame(capture) if frame is None: continue # mirror #cv.Flip(frame,", "buffer #Realign to initial orientation of multiple face detection. if left > right:", "frame frame = cv.QueryFrame(capture) if frame is None: continue # mirror #cv.Flip(frame, None,", "of contributors may be used to endorse or promote products derived from this", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF", "or promote products derived from this software without specific prior written permission. 
*", "image_size = cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS", "2.x and 2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "and the following disclaimer. * Redistributions in binary form must reproduce the above", "sys #importing system for handling signals for exit import cv #importing opencv for", "connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication usb", "port='/dev/ttyUSB0', #The port where the serial communication usb is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE,", "resolution, i.e. handling webcam with arbitrary resolution image_size = cv.GetSize(image) # create grayscale", "x = x - 1 time.sleep(1) #waiting for clearing of serial buffer #", "webcam), use i for the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH,", "...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from", "orientation of multiple face detection faceNotDetected = faceNotDetected + 1 if faceNotDetected >", "circumscribing the face in the image #drawing rectangles around the faces in the", "list of conditions and the following disclaimer. 
* Redistributions in binary form must", "0 #retracing to position and orientation of multiple face detection faceNotDetected = faceNotDetected", "position and orientation of multiple face detection faceNotDetected = faceNotDetected + 1 if", "if left > right: x = left - right left = 0 right", "== 1: multipleFaces = 0 multipleFacesInit = 0 #algining itself to face the", "for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces", "print faces #printing the rectangles circumscribing the face in the image #drawing rectangles", "from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image # create capture", "storage = cv.CreateMemStorage(0) #creating required storage for face detection # equalize histogram cv.EqualizeHist(grayscale,", "faces in the image #These parameters are tweaked to RGB video captures, please", "for larger image resolutions # check if capture device is OK if not", "larger image resolutions # check if capture device is OK if not capture:", "380: ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep = faceBeep + 1", "contributors may be used to endorse or promote products derived from this software", "the serial connections (the parameters differs on the device you are connecting to)", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "if faceNotDetected > 10 and multipleFaces == 1: faceNotDetected = 0 multipleFacesInit =", "parameters. print faces #printing the rectangles circumscribing the face in the image #drawing", "this list of conditions and the following disclaimer in the documentation and/or other", "be used for academic purpose. 
For commercial use permission form the author needs", "= 0 left = 0 right = 0 while 1: # do forever", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "assume we want second capture device(USB webcam), use i for the capure device", "and height parameters, the processing speed will be slower for larger image resolutions", "* Redistributions in binary form must reproduce the above copyright notice, this list", "Redistributions in binary form must reproduce the above copyright notice, this list of", "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "for face detection import time #for sleep function import serial #importing pyserial for", "LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing", "in the image if faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0]", "-*- c -*- All rights reserved. 
Redistribution and use in source and binary", "for the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture", "cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These parameters are tweaked to RGB", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "= 0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if", "= 0 multipleFacesInit = 1 #Realign to initial position of multiple face detection.", "= x - 1 time.sleep(1) #waiting for clearing of serial buffer else: x", "480) #setting capture height to 480 #If you want to capture at native", "1) # face detection faces = detect(frame) if len(faces) > 0: firstFaceDetected =", "1 #Realign to initial position of multiple face detection. while forward > 0:", "HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Software released under Creative Commence cc", "# equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale) # detect", "= 0 forward = 0 left = 0 right = 0 while 1:", "= forward - 1 time.sleep(1) #waiting for clearing of serial buffer #Realign to", "image #drawing rectangles around the faces in the image if faces: for i", "processed image cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar", "cc by-nc-sa licence. 
For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code", "cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture", "rights reserved. Redistribution and use in source and binary forms, with or without", "exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame", "you want to capture at native resolution of the web cam don't set", "face detection falseDetection = 0 #For false detection faceNotDetected = 0 #used for", "-*- All rights reserved. Redistribution and use in source and binary forms, with", "found multipleFaces = 0 #for indicating that multiple faces multipleFacesInit = 0 forward", "provided that the following conditions are met: * Redistributions of source code must", "grayscale, cv.CV_BGR2GRAY) #copying the black and white version of the image into the", "IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED", "+ i[0][3]), (0, 255, 0), 3, 8, 0) return faces if __name__ ==", "right = 0 while 1: # do forever # capture the current frame", "the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the", "k == 0x1b: # ESC print 'ESC pressed. Exiting ...' 
break #Exiting the", "else: if multipleFaces == 0: falseDetection = falseDetection + 1 #handling false detection", "else: if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit = 0 #algining itself", "< 260: ser.write('S'+'\\r\\n') left = left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2)", "multipleFaces == 1: faceNotDetected = 0 multipleFacesInit = 1 #Realign to initial position", "around the faces in the image if faces: for i in faces: cv.Rectangle(image,", "face detected else: if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit = 0", "faceBeep + 1 #alarming for detected enemy if faceBeep > 3: faceBeep =", "== 0: firstFaceDetected = 0 else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print", "used to endorse or promote products derived from this software without specific prior", "1 else: faceBeep = faceBeep + 1 #alarming for detected enemy if faceBeep", "binary form must reproduce the above copyright notice, this list of conditions and", "Creative Commence cc by-nc-sa licence. For legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/*******************************************************************************", "time #for sleep function import serial #importing pyserial for serial communication count =", "# configure the serial connections (the parameters differs on the device you are", "for academic purpose. 
For commercial use permission form the author needs to be", "= 1 #multiple faces if len(faces) > 1: multipleFacesInit = 0 multipleFaces =", "0 #algining itself to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260:", "capture height to 480 #If you want to capture at native resolution of", "faceBeep = faceBeep + 1 #alarming for detected enemy if faceBeep > 3:", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "black and white version of the image into the blank image # create", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "blank image with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black", "= cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480)", "image into the blank image # create storage storage = cv.CreateMemStorage(0) #creating required", "handles false faces that may creep in. 'S' ----> Move Left 'T' ---->", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS", "left = 0 right = 0 while 1: # do forever # capture", "x = left - right left = 0 right = 0 while x", "1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward + 1", "image processing for face detection. The code also handles false faces that may", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN", "the above copyright notice, this list of conditions and the following disclaimer. *", "= cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating a", "faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep", "conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the", "1 # assume we want second capture device(USB webcam), use i for the", "For commercial use permission form the author needs to be taken. THIS SOFTWARE", "> right: x = left - right left = 0 right = 0", "import cv #importing opencv for face detection import time #for sleep function import", "Border Surveillance Bot Author: 1.<NAME> 2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab,", "# create capture device device = 1 # assume we want second capture", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Software released under Creative Commence", "processing for face detection. The code also handles false faces that may creep", "retain the above copyright notice, this list of conditions and the following disclaimer.", "1 time.sleep(1) #waiting for clearing of serial buffer #Realign to initial orientation of", "== \"__main__\": print \"Press ESC to exit ...\" # create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE)", "ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF", "to capture at native resolution of the web cam don't set obove width", "= faceBeep + 1 #alarming for detected enemy if faceBeep > 3: faceBeep", "of serial buffer else: x = right - left right = 0 left", "2.<NAME> **************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay. -*- c -*-", "BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT", "Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces", "capture device(USB webcam), use i for the capure device /dev/videoi capture = cv.CaptureFromCAM(device)", "image if faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2],", "multiple face detection. if left > right: x = left - right left", "buffer else: x = right - left right = 0 left = 0", "for exit import cv #importing opencv for face detection import time #for sleep", "for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]),", "forward = forward + 1 faceBeep = 0 #Single face detected else: if", "version grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank image with the given", "permission. * Source code can be used for academic purpose. For commercial use", "firstFaceDetected = 0 #Used for face detection falseDetection = 0 #For false detection", "Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces ==", "0 right = 0 while x > 0: ser.write('T'+'\\r\\n') x = x -", "holders nor the names of contributors may be used to endorse or promote", "face detection. if left > right: x = left - right left =", "Neither the name of the copyright holders nor the names of contributors may", "grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank image with the given image's", "#Single face detected else: if multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit =", "== 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0: falseDetection = falseDetection", "#print \"batu\\n\" else: if multipleFaces == 0: falseDetection = falseDetection + 1 #handling", "0 multipleFacesInit = 1 #Realign to initial position of multiple face detection. 
while", "i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3, 8, 0)", "image resolutions # check if capture device is OK if not capture: print", "LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "ser.isOpen() def detect(image): #Getting size of the image for handling generic image resolution,", "RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. print faces", "= serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication usb is present. baudrate=9600,", "indicating that multiple faces multipleFacesInit = 0 forward = 0 left = 0", "0 while x > 0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting", "reserved. Redistribution and use in source and binary forms, with or without modification,", "Forward 'R' ----> Move Backward 'B' ----> Beep for one second 'Z' ---->", "if faceBeep > 3: faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1)", "#importing system for handling signals for exit import cv #importing opencv for face", "initial orientation of multiple face detection. 
if left > right: x = left", "legal information refer to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for", "\"Error opening capture device\" sys.exit(1) faceBeep = 0 #Used for beeping control firstFaceDetected", "== 1: faceNotDetected = 0 multipleFacesInit = 1 #Realign to initial position of", "face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale)", "arbitrary resolution image_size = cv.GetSize(image) # create grayscale version grayscale = cv.CreateImage(image_size, 8,", "in binary form must reproduce the above copyright notice, this list of conditions", "of conditions and the following disclaimer in the documentation and/or other materials provided", "# do forever # capture the current frame frame = cv.QueryFrame(capture) if frame", "while 1: # do forever # capture the current frame frame = cv.QueryFrame(capture)", "faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255, 0),", "tweaked to RGB video captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters.", "(the parameters differs on the device you are connecting to) ser = serial.Serial(", "640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480 #If you want to", "= 0 right = 0 while 1: # do forever # capture the", "capture at native resolution of the web cam don't set obove width and", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS OR", "of serial buffer # display webcam image cv.ShowImage('Raw', frame) # handle events k", "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Software", "distribution. 
* Neither the name of the copyright holders nor the names of", "left = 0 right = 0 while x > 0: ser.write('T'+'\\r\\n') x =", "autosizable windows for processed image # create capture device device = 1 #", "= 0 #retracing to position and orientation of multiple face detection faceNotDetected =", "detection. while forward > 0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting", "code must retain the above copyright notice, this list of conditions and the", "* Source code can be used for academic purpose. For commercial use permission", "# show processed image cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading", "def detect(image): #Getting size of the image for handling generic image resolution, i.e.", "= falseDetection + 1 #handling false detection if falseDetection > 10: falseDetection =", "signals for exit import cv #importing opencv for face detection import time #for", "and binary forms, with or without modification, are permitted provided that the following", "this software without specific prior written permission. * Source code can be used", "else: #scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces ==", "clearing of serial buffer else: x = right - left right = 0", "source code must retain the above copyright notice, this list of conditions and", "second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import sys #importing system for handling", "code can be used for academic purpose. 
For commercial use permission form the", "cv.CreateImage(image_size, 8, 1) #creating a blank image with the given image's resolution cv.CvtColor(image,", "faceNotDetected = 0 #used for checking if no face is found multipleFaces =", "#setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to 480", "create windows cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from webcam cv.NamedWindow('Processed',", "does image processing for face detection. The code also handles false faces that", "in. 'S' ----> Move Left 'T' ----> Move Right 'Q' ----> Move Forward", "0) return faces if __name__ == \"__main__\": print \"Press ESC to exit ...\"", "#waiting for clearing of serial buffer else: x = right - left right", "THE POSSIBILITY OF SUCH DAMAGE. Software released under Creative Commence cc by-nc-sa licence.", "= 0 #Used for face detection falseDetection = 0 #For false detection faceNotDetected", "0 left = 0 right = 0 while 1: # do forever #", "3, 8, 0) return faces if __name__ == \"__main__\": print \"Press ESC to", "k = cv.WaitKey(10) if k == 0x1b: # ESC print 'ESC pressed. Exiting", "rectangles around the faces in the image if faces: for i in faces:", "creep in. 'S' ----> Move Left 'T' ----> Move Right 'Q' ----> Move", "PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE", "OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "speed will be slower for larger image resolutions # check if capture device", "version of the image into the blank image # create storage storage =", "None: continue # mirror #cv.Flip(frame, None, 1) # face detection faces = detect(frame)", "processing speed will be slower for larger image resolutions # check if capture", "= 0 #used for checking if no face is found multipleFaces = 0", "right = 0 left = 0 while x > 0: ser.write('S'+'\\r\\n') x =", "sys.exit(1) faceBeep = 0 #Used for beeping control firstFaceDetected = 0 #Used for", "detected enemy if faceBeep > 3: faceBeep = 0 print 'Beeping for Faces'", "endorse or promote products derived from this software without specific prior written permission.", "the following disclaimer. * Redistributions in binary form must reproduce the above copyright", "__name__ == \"__main__\": print \"Press ESC to exit ...\" # create windows cv.NamedWindow('Raw',", "permitted provided that the following conditions are met: * Redistributions of source code", "#drawing rectangles around the faces in the image if faces: for i in", "0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of serial", "to 480 #If you want to capture at native resolution of the web", "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER", "that may creep in. 'S' ----> Move Left 'T' ----> Move Right 'Q'", "falseDetection = 0 #For false detection faceNotDetected = 0 #used for checking if", "grayscale) # show processed image cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml')", "clearing of serial buffer # display webcam image cv.ShowImage('Raw', frame) # handle events", "and orientation of multiple face detection faceNotDetected = faceNotDetected + 1 if faceNotDetected", "can be used for academic purpose. 
For commercial use permission form the author", "in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] + i[0][3]), (0, 255,", "storage storage = cv.CreateMemStorage(0) #creating required storage for face detection # equalize histogram", "> 1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward +", "= 0 #Single face detected else: if multipleFacesInit == 1: multipleFaces = 0", "**************************************************************************************************/ /******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay. -*- c -*- All", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "/******************************************************************************** Copyright (c) 2010, ERTS Lab, IIT Bombay. -*- c -*- All rights", "FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "detection faceNotDetected = faceNotDetected + 1 if faceNotDetected > 10 and multipleFaces ==", "= left - right left = 0 right = 0 while x >", "cv.NamedWindow('Raw', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating", "3: faceBeep = 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n')", "x > 0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing", "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "i for the capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting", "AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "show processed image cv.ShowImage('Processed', grayscale) # detect objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the", 
"detection falseDetection = 0 #For false detection faceNotDetected = 0 #used for checking", "255, 0), 3, 8, 0) return faces if __name__ == \"__main__\": print \"Press", "frame = cv.QueryFrame(capture) if frame is None: continue # mirror #cv.Flip(frame, None, 1)", "to: http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for face detection. The", "products derived from this software without specific prior written permission. * Source code", "POSSIBILITY OF SUCH DAMAGE. Software released under Creative Commence cc by-nc-sa licence. For", "display webcam image cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10) if k", "+ 1 else: faceBeep = faceBeep + 1 #alarming for detected enemy if", "# create grayscale version grayscale = cv.CreateImage(image_size, 8, 1) #creating a blank image", "Move Fast Left ********************************************************************************/\"\"\" import sys #importing system for handling signals for exit", "= cv.WaitKey(10) if k == 0x1b: # ESC print 'ESC pressed. Exiting ...'", "for clearing of serial buffer else: x = right - left right =", "form must reproduce the above copyright notice, this list of conditions and the", "+ 1 #handling false detection if falseDetection > 10: falseDetection = 0 firstFaceDetected", "the distribution. * Neither the name of the copyright holders nor the names", "above copyright notice, this list of conditions and the following disclaimer in the", "and use in source and binary forms, with or without modification, are permitted", "use in source and binary forms, with or without modification, are permitted provided", "permission form the author needs to be taken. 
THIS SOFTWARE IS PROVIDED BY", "create storage storage = cv.CreateMemStorage(0) #creating required storage for face detection # equalize", "create capture device device = 1 # assume we want second capture device(USB", "serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication usb is present. baudrate=9600, parity=serial.PARITY_NONE,", "the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white version", "the blank image # create storage storage = cv.CreateMemStorage(0) #creating required storage for", "copyright notice, this list of conditions and the following disclaimer in the documentation", "processed image # create capture device device = 1 # assume we want", "#cv.Flip(frame, None, 1) # face detection faces = detect(frame) if len(faces) > 0:", "#alarming for detected enemy if faceBeep > 3: faceBeep = 0 print 'Beeping", "multipleFaces == 0: falseDetection = falseDetection + 1 #handling false detection if falseDetection", "#for sleep function import serial #importing pyserial for serial communication count = 0", "handle events k = cv.WaitKey(10) if k == 0x1b: # ESC print 'ESC", "cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These", "parameters, the processing speed will be slower for larger image resolutions # check", "itself to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR", "falseDetection = 0 firstFaceDetected = 0 #retracing to position and orientation of multiple", "= forward + 1 faceBeep = 0 #Single face detected else: if multipleFacesInit", "of multiple face detection faceNotDetected = faceNotDetected + 1 if faceNotDetected > 10", "2.x.x Title: Border Surveillance Bot Author: 1.<NAME> 2.<NAME> 
**************************************************************************************************/ /******************************************************************************** Copyright (c) 2010,", "met: * Redistributions of source code must retain the above copyright notice, this", "return faces if __name__ == \"__main__\": print \"Press ESC to exit ...\" #", "windows for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed", "forward - 1 time.sleep(1) #waiting for clearing of serial buffer #Realign to initial", "time.sleep(1) #waiting for clearing of serial buffer else: x = right - left", "equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale) # detect objects", "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "be slower for larger image resolutions # check if capture device is OK", "0 left = 0 while x > 0: ser.write('S'+'\\r\\n') x = x -", "c -*- All rights reserved. Redistribution and use in source and binary forms,", "cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable", "for tweaking your parameters. 
print faces #printing the rectangles circumscribing the face in", "detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show processed image cv.ShowImage('Processed', grayscale) #", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY", "mirror #cv.Flip(frame, None, 1) # face detection faces = detect(frame) if len(faces) >", "#multiple faces if len(faces) > 1: multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n')", "time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "that multiple faces multipleFacesInit = 0 forward = 0 left = 0 right", "for captured frame from webcam cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image", "the image #drawing rectangles around the faces in the image if faces: for", "cv.NamedWindow('Processed', cv.CV_WINDOW_AUTOSIZE) #creating autosizable windows for processed image # create capture device device", "> 0: ser.write('S'+'\\r\\n') x = x - 1 time.sleep(1) #waiting for clearing of", "the current frame frame = cv.QueryFrame(capture) if frame is None: continue # mirror", "----> Move Forward 'R' ----> Move Backward 'B' ----> Beep for one second", "ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0 else:", "ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0:", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO", "cv.QueryFrame(capture) if frame is None: continue # mirror #cv.Flip(frame, None, 1) # face", "the faces in the image #These parameters are tweaked to RGB video captures,", "0 #Used for beeping control firstFaceDetected = 0 #Used for face detection falseDetection", "right + 1 else: faceBeep = faceBeep + 1 #alarming for detected enemy", "#If you want to capture at native resolution of the web cam don't", "= 0 #algining itself to face the enemy if faces[0][0][0] + float(faces[0][0][2]/2) <", "cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height", "source and binary forms, with or without modification, are permitted provided that the", "#creating required storage for face detection # equalize histogram cv.EqualizeHist(grayscale, grayscale) # show", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "following conditions are met: * Redistributions of source code must retain the above", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES", "= 1 # assume we want second capture device(USB webcam), use i for", "http://creativecommons.org/licenses/by-nc-sa/3.0/legalcode ********************************************************************************\"\"\" \"\"\"/******************************************************************************* This code does image processing for face detection. The code", "Bombay. -*- c -*- All rights reserved. Redistribution and use in source and", "+ 1 #alarming for detected enemy if faceBeep > 3: faceBeep = 0", "right = right + 1 else: faceBeep = faceBeep + 1 #alarming for", "forward = 0 left = 0 right = 0 while 1: # do", "multiple faces multipleFacesInit = 0 forward = 0 left = 0 right =", "faces that may creep in. 
'S' ----> Move Left 'T' ----> Move Right", "with the given image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white", "if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right + 1 else:", "WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE", "/dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to 640 cv.SetCaptureProperty(capture,", "= 0 #for indicating that multiple faces multipleFacesInit = 0 forward = 0", "size of the image for handling generic image resolution, i.e. handling webcam with", "if no face is found multipleFaces = 0 #for indicating that multiple faces", "cv.CV_BGR2GRAY) #copying the black and white version of the image into the blank", "the following conditions are met: * Redistributions of source code must retain the", "of the image into the blank image # create storage storage = cv.CreateMemStorage(0)", "obove width and height parameters, the processing speed will be slower for larger", "right = 0 while x > 0: ser.write('T'+'\\r\\n') x = x - 1", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS", "stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting size of the image for", "480 #If you want to capture at native resolution of the web cam", "no face is found multipleFaces = 0 #for indicating that multiple faces multipleFacesInit", "- right left = 0 right = 0 while x > 0: ser.write('T'+'\\r\\n')", "= 0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n')", "if faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1]", "faces: for i in faces: cv.Rectangle(image, (i[0][0], i[0][1]), (i[0][0] + i[0][2], i[0][1] +", "specific prior written permission. 
* Source code can be used for academic purpose.", "USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "'R' ----> Move Backward 'B' ----> Beep for one second 'Z' ----> Move", "multipleFacesInit == 1: multipleFaces = 0 multipleFacesInit = 0 #algining itself to face", "the author needs to be taken. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "image's resolution cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY) #copying the black and white version of the", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "multipleFacesInit = 0 multipleFaces = 1 ser.write('Q'+'\\r\\n') forward = forward + 1 faceBeep", "'B' ----> Beep for one second 'Z' ----> Move Fast Left ********************************************************************************/\"\"\" import", "x - 1 time.sleep(1) #waiting for clearing of serial buffer else: x =", "1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right +", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES LOSS OF USE,", "float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep = faceBeep", "time.sleep(1) #waiting for clearing of serial buffer # display webcam image cv.ShowImage('Raw', frame)", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR", "the black and white version of the image into the blank image #", "Move Forward 'R' ----> Move Backward 'B' ----> Beep for one second 'Z'", "buffer # display webcam image cv.ShowImage('Raw', frame) # handle events k = cv.WaitKey(10)", "Software released under Creative Commence cc by-nc-sa licence. For legal information refer to:", "in the image #These parameters are tweaked to RGB video captures, please refer", "forward + 1 faceBeep = 0 #Single face detected else: if multipleFacesInit ==", "notice, this list of conditions and the following disclaimer. 
* Redistributions in binary", "image # create storage storage = cv.CreateMemStorage(0) #creating required storage for face detection", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "Right 'Q' ----> Move Forward 'R' ----> Move Backward 'B' ----> Beep for", "capure device /dev/videoi capture = cv.CaptureFromCAM(device) cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_WIDTH, 640) #setting capture width to", "the processing speed will be slower for larger image resolutions # check if", "= 0 firstFaceDetected = 0 #retracing to position and orientation of multiple face", "ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting for clearing of serial buffer", "i[0][2], i[0][1] + i[0][3]), (0, 255, 0), 3, 8, 0) return faces if", "= 0 multipleFacesInit = 0 #algining itself to face the enemy if faces[0][0][0]", "face in the image #drawing rectangles around the faces in the image if", "x - 1 time.sleep(1) #waiting for clearing of serial buffer # display webcam", "to position and orientation of multiple face detection faceNotDetected = faceNotDetected + 1", "cam don't set obove width and height parameters, the processing speed will be", "forward > 0: ser.write('R'+'\\r\\n') forward = forward - 1 time.sleep(1) #waiting for clearing", "0 while 1: # do forever # capture the current frame frame =", "detection if falseDetection > 10: falseDetection = 0 firstFaceDetected = 0 #retracing to", "#Getting size of the image for handling generic image resolution, i.e. 
handling webcam", "objects cascade = cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage,", "into the blank image # create storage storage = cv.CreateMemStorage(0) #creating required storage", "None, 1) # face detection faces = detect(frame) if len(faces) > 0: firstFaceDetected", "0: firstFaceDetected = 1 #multiple faces if len(faces) > 1: multipleFacesInit = 0", "detect(image): #Getting size of the image for handling generic image resolution, i.e. handling", "communication usb is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image):", "handling webcam with arbitrary resolution image_size = cv.GetSize(image) # create grayscale version grayscale", "left = left + 1 else: if faces[0][0][0] + float(faces[0][0][2]/2) > 380: ser.write('T'+'\\r\\n')", "== 0: falseDetection = falseDetection + 1 #handling false detection if falseDetection >", "frame) # handle events k = cv.WaitKey(10) if k == 0x1b: # ESC", "cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING) #detecting the faces in the image #These parameters", "640) #setting capture width to 640 cv.SetCaptureProperty(capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 480) #setting capture height to", "connections (the parameters differs on the device you are connecting to) ser =", "is OK if not capture: print \"Error opening capture device\" sys.exit(1) faceBeep =", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN", "print \"Error opening capture device\" sys.exit(1) faceBeep = 0 #Used for beeping control", "continue # mirror #cv.Flip(frame, None, 1) # face detection faces = detect(frame) if", "len(faces) > 0: firstFaceDetected = 1 #multiple faces if len(faces) > 1: multipleFacesInit", "time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') if multipleFaces == 0: firstFaceDetected = 0 else: #scouting", "0: 
ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0: falseDetection = falseDetection +", "#detecting the faces in the image #These parameters are tweaked to RGB video", "second capture device(USB webcam), use i for the capure device /dev/videoi capture =", "> 380: ser.write('T'+'\\r\\n') right = right + 1 else: faceBeep = faceBeep +", "the enemy if faces[0][0][0] + float(faces[0][0][2]/2) < 260: ser.write('S'+'\\r\\n') left = left +", "AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE", "captures, please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. print faces #printing the", "control firstFaceDetected = 0 #Used for face detection falseDetection = 0 #For false", "detection import time #for sleep function import serial #importing pyserial for serial communication", "1 #multiple faces if len(faces) > 1: multipleFacesInit = 0 multipleFaces = 1", "#scouting if firstFaceDetected == 0: ser.write('Z'+'\\r\\n') #print \"batu\\n\" else: if multipleFaces == 0:", "i[0][1] + i[0][3]), (0, 255, 0), 3, 8, 0) return faces if __name__", "8, 0) return faces if __name__ == \"__main__\": print \"Press ESC to exit", "faces #printing the rectangles circumscribing the face in the image #drawing rectangles around", "to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where the serial communication usb is", "want to capture at native resolution of the web cam don't set obove", "you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where the serial", "0 #Used for face detection falseDetection = 0 #For false detection faceNotDetected =", "for checking if no face is found multipleFaces = 0 #for indicating that", "the rectangles circumscribing the face in the image #drawing rectangles around the faces", "for face detection. 
The code also handles false faces that may creep in.", "x = x - 1 time.sleep(1) #waiting for clearing of serial buffer else:", "please refer to http://opencv.willowgarage.com/documentation/python/objdetect_cascade_classification.html for tweaking your parameters. print faces #printing the rectangles", "detect(frame) if len(faces) > 0: firstFaceDetected = 1 #multiple faces if len(faces) >", "0 print 'Beeping for Faces' ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1) ser.write('B'+'\\r\\n') time.sleep(1)", "handling generic image resolution, i.e. handling webcam with arbitrary resolution image_size = cv.GetSize(image)", "PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "following disclaimer. * Redistributions in binary form must reproduce the above copyright notice,", "do forever # capture the current frame frame = cv.QueryFrame(capture) if frame is", "usb is present. baudrate=9600, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS ) ser.open() ser.isOpen() def detect(image): #Getting", "1: # do forever # capture the current frame frame = cv.QueryFrame(capture) if", "device you are connecting to) ser = serial.Serial( port='/dev/ttyUSB0', #The port where the", "cv.Load('haarcascade_frontalface_alt.xml') #loading the Haar Cascade faces = cv.HaarDetectObjects(grayscale, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING)" ]
[ "Called by the ModuleHandler when shouldTrigger(message) is True. Contains all actions the module", "with high priority trigger before ones with low priority for any given message", "module is to perform on the message object. @type message: hubbot.message.IRCMessage @return: hubbot.response.IRCResponse", "= 0 ADMINS = 1 class ModuleInterface(object): \"\"\" The interface modules should inherit", "triggers - command words that cause the module to trigger. accepted_types - message", "yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\" @type bot:", "= 0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot", "def on_load(self): \"\"\" Called when the module is loaded by the ModuleHandler \"\"\"", "unloaded by the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called by the", "\"\"\" pass def should_trigger(self, message): \"\"\" Called by the ModuleHandler for each incoming", "priority trigger before ones with low priority for any given message object. \"\"\"", "[] accepted_types = [\"PRIVMSG\"] help = \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE", "bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\" Called", "def on_trigger(self, message): \"\"\" Called by the ModuleHandler when shouldTrigger(message) is True. Contains", "with the ModuleHandler. triggers - command words that cause the module to trigger.", "access_level - whether the module should be trigger-able by ANYONE, or only users", "message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return False if message.command not", "self.triggers: return False return True def on_trigger(self, message): \"\"\" Called by the ModuleHandler", "by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when the module is", "(PRIVMSG, ACTION, NOTICE). help - help text for the module. 
May be unicode", "ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when the module is unloaded by", "trigger Default behavior is to trigger on any message that matches the accepted", "not in self.accepted_types: return False if message.command not in self.triggers: return False return", "returning a unicode object. access_level - whether the module should be trigger-able by", "users on the admin list. priority - the priority for the module. modules", "to trigger on any message that matches the accepted types and contains a", "matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return", "modules should inherit and implement in order to function with the ModuleHandler. triggers", "priority for any given message object. \"\"\" triggers = [] accepted_types = [\"PRIVMSG\"]", "be trigger-able by ANYONE, or only users on the admin list. priority -", "the module to trigger. accepted_types - message types that can cause the module", "module is loaded by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when", "cause the module to trigger. accepted_types - message types that can cause the", "= 1 class ModuleInterface(object): \"\"\" The interface modules should inherit and implement in", "the module to trigger (PRIVMSG, ACTION, NOTICE). help - help text for the", "module. modules with high priority trigger before ones with low priority for any", "command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return False", "should be trigger-able by ANYONE, or only users on the admin list. 
priority", "class ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class ModuleInterface(object): \"\"\" The interface", "\"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help = \"No help defined yet.\"", "= \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self,", "message object help(message), returning a unicode object. access_level - whether the module should", "loaded by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when the module", "pass def should_trigger(self, message): \"\"\" Called by the ModuleHandler for each incoming message,", "unicode object. access_level - whether the module should be trigger-able by ANYONE, or", "ADMINS = 1 class ModuleInterface(object): \"\"\" The interface modules should inherit and implement", "defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\" @type", "each incoming message, to see if said message causes this module to trigger", "interface modules should inherit and implement in order to function with the ModuleHandler.", "\"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\" Called when", "any message that matches the accepted types and contains a matching command trigger", "bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\" Called when the module", "on_trigger(self, message): \"\"\" Called by the ModuleHandler when shouldTrigger(message) is True. Contains all", "hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\" Called when the module is", "command words that cause the module to trigger. 
accepted_types - message types that", "should_trigger(self, message): \"\"\" Called by the ModuleHandler for each incoming message, to see", "trigger on any message that matches the accepted types and contains a matching", "when the module is unloaded by the ModuleHandler \"\"\" pass def should_trigger(self, message):", "cause the module to trigger (PRIVMSG, ACTION, NOTICE). help - help text for", "module is unloaded by the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called", "see if said message causes this module to trigger Default behavior is to", "Default behavior is to trigger on any message that matches the accepted types", "the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when the module is unloaded", "trigger. accepted_types - message types that can cause the module to trigger (PRIVMSG,", "contains a matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not in", "shouldTrigger(message) is True. Contains all actions the module is to perform on the", "Enum class ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class ModuleInterface(object): \"\"\" The", "return False if message.command not in self.triggers: return False return True def on_trigger(self,", "a function on the message object help(message), returning a unicode object. access_level -", "object help(message), returning a unicode object. access_level - whether the module should be", "ones with low priority for any given message object. \"\"\" triggers = []", "access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot", "inherit and implement in order to function with the ModuleHandler. triggers - command", "False if message.command not in self.triggers: return False return True def on_trigger(self, message):", "True. 
Contains all actions the module is to perform on the message object.", "message causes this module to trigger Default behavior is to trigger on any", "words that cause the module to trigger. accepted_types - message types that can", "a matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types:", "to trigger (PRIVMSG, ACTION, NOTICE). help - help text for the module. May", "in order to function with the ModuleHandler. triggers - command words that cause", "whether the module should be trigger-able by ANYONE, or only users on the", "this module to trigger Default behavior is to trigger on any message that", "by the ModuleHandler for each incoming message, to see if said message causes", "in self.accepted_types: return False if message.command not in self.triggers: return False return True", "accepted_types - message types that can cause the module to trigger (PRIVMSG, ACTION,", "when the module is loaded by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\"", "message object. \"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help = \"No help", "message.type not in self.accepted_types: return False if message.command not in self.triggers: return False", "a unicode object. access_level - whether the module should be trigger-able by ANYONE,", "that matches the accepted types and contains a matching command trigger @type message:", "high priority trigger before ones with low priority for any given message object.", "message): \"\"\" Called by the ModuleHandler when shouldTrigger(message) is True. Contains all actions", "that can cause the module to trigger (PRIVMSG, ACTION, NOTICE). 
help - help", "to trigger Default behavior is to trigger on any message that matches the", "be unicode or a function on the message object help(message), returning a unicode", "return True def on_trigger(self, message): \"\"\" Called by the ModuleHandler when shouldTrigger(message) is", "the accepted types and contains a matching command trigger @type message: hubbot.message.IRCMessage \"\"\"", "Called by the ModuleHandler for each incoming message, to see if said message", "incoming message, to see if said message causes this module to trigger Default", "perform on the message object. @type message: hubbot.message.IRCMessage @return: hubbot.response.IRCResponse | None \"\"\"", "return False return True def on_trigger(self, message): \"\"\" Called by the ModuleHandler when", "NOTICE). help - help text for the module. May be unicode or a", "is to perform on the message object. @type message: hubbot.message.IRCMessage @return: hubbot.response.IRCResponse |", "object. access_level - whether the module should be trigger-able by ANYONE, or only", "only users on the admin list. priority - the priority for the module.", "for the module. May be unicode or a function on the message object", "on any message that matches the accepted types and contains a matching command", "causes this module to trigger Default behavior is to trigger on any message", "when shouldTrigger(message) is True. Contains all actions the module is to perform on", "help(message), returning a unicode object. access_level - whether the module should be trigger-able", "any given message object. 
\"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help =", "is unloaded by the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called by", "the module is unloaded by the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\"", "enum import Enum class ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class ModuleInterface(object):", "def should_trigger(self, message): \"\"\" Called by the ModuleHandler for each incoming message, to", "message): \"\"\" Called by the ModuleHandler for each incoming message, to see if", "message that matches the accepted types and contains a matching command trigger @type", "self.accepted_types: return False if message.command not in self.triggers: return False return True def", "module should be trigger-able by ANYONE, or only users on the admin list.", "Called when the module is loaded by the ModuleHandler \"\"\" pass def on_unload(self):", "is loaded by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called when the", "the ModuleHandler when shouldTrigger(message) is True. Contains all actions the module is to", "trigger-able by ANYONE, or only users on the admin list. priority - the", "ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called by the ModuleHandler for each", "triggers = [] accepted_types = [\"PRIVMSG\"] help = \"No help defined yet.\" access_level", "module to trigger (PRIVMSG, ACTION, NOTICE). help - help text for the module.", "= bot def on_load(self): \"\"\" Called when the module is loaded by the", "that cause the module to trigger. accepted_types - message types that can cause", "- whether the module should be trigger-able by ANYONE, or only users on", "@type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\" Called when the", "class ModuleInterface(object): \"\"\" The interface modules should inherit and implement in order to", "\"\"\" Called when the module is unloaded by the ModuleHandler \"\"\" pass def", "the ModuleHandler. 
triggers - command words that cause the module to trigger. accepted_types", "the module is loaded by the ModuleHandler \"\"\" pass def on_unload(self): \"\"\" Called", "can cause the module to trigger (PRIVMSG, ACTION, NOTICE). help - help text", "accepted types and contains a matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if", "ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot", "the module. modules with high priority trigger before ones with low priority for", "\"\"\" Called when the module is loaded by the ModuleHandler \"\"\" pass def", "message.command not in self.triggers: return False return True def on_trigger(self, message): \"\"\" Called", "given message object. \"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help = \"No", "the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called by the ModuleHandler for", "accepted_types = [\"PRIVMSG\"] help = \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority", "def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self):", "[\"PRIVMSG\"] help = \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0", "bot def on_load(self): \"\"\" Called when the module is loaded by the ModuleHandler", "ModuleHandler for each incoming message, to see if said message causes this module", "order to function with the ModuleHandler. triggers - command words that cause the", "help = \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def", "and contains a matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not", "the ModuleHandler for each incoming message, to see if said message causes this", "admin list. priority - the priority for the module. modules with high priority", "text for the module. 
May be unicode or a function on the message", "not in self.triggers: return False return True def on_trigger(self, message): \"\"\" Called by", "pass def on_unload(self): \"\"\" Called when the module is unloaded by the ModuleHandler", "def on_unload(self): \"\"\" Called when the module is unloaded by the ModuleHandler \"\"\"", "should inherit and implement in order to function with the ModuleHandler. triggers -", "behavior is to trigger on any message that matches the accepted types and", "on the admin list. priority - the priority for the module. modules with", "on_unload(self): \"\"\" Called when the module is unloaded by the ModuleHandler \"\"\" pass", "priority for the module. modules with high priority trigger before ones with low", "on the message object. @type message: hubbot.message.IRCMessage @return: hubbot.response.IRCResponse | None \"\"\" pass", "or a function on the message object help(message), returning a unicode object. access_level", "- help text for the module. May be unicode or a function on", "help text for the module. May be unicode or a function on the", "object. \"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help = \"No help defined", "from enum import Enum class ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class", "unicode or a function on the message object help(message), returning a unicode object.", "\"\"\" Called by the ModuleHandler for each incoming message, to see if said", "ACTION, NOTICE). help - help text for the module. May be unicode or", "on the message object help(message), returning a unicode object. access_level - whether the", "self.bot = bot def on_load(self): \"\"\" Called when the module is loaded by", "the module. May be unicode or a function on the message object help(message),", "is True. Contains all actions the module is to perform on the message", "priority - the priority for the module. 
modules with high priority trigger before", "message, to see if said message causes this module to trigger Default behavior", "list. priority - the priority for the module. modules with high priority trigger", "= [] accepted_types = [\"PRIVMSG\"] help = \"No help defined yet.\" access_level =", "hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return False if message.command not in", "types that can cause the module to trigger (PRIVMSG, ACTION, NOTICE). help -", "function with the ModuleHandler. triggers - command words that cause the module to", "True def on_trigger(self, message): \"\"\" Called by the ModuleHandler when shouldTrigger(message) is True.", "the message object help(message), returning a unicode object. access_level - whether the module", "= ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\"", "and implement in order to function with the ModuleHandler. triggers - command words", "ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class ModuleInterface(object): \"\"\" The interface modules", "ModuleInterface(object): \"\"\" The interface modules should inherit and implement in order to function", "help - help text for the module. May be unicode or a function", "all actions the module is to perform on the message object. @type message:", "for any given message object. \"\"\" triggers = [] accepted_types = [\"PRIVMSG\"] help", "message types that can cause the module to trigger (PRIVMSG, ACTION, NOTICE). help", "priority = 0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot =", "the priority for the module. modules with high priority trigger before ones with", "@type message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return False if message.command", "Called when the module is unloaded by the ModuleHandler \"\"\" pass def should_trigger(self,", "or only users on the admin list. 
priority - the priority for the", "before ones with low priority for any given message object. \"\"\" triggers =", "ModuleHandler when shouldTrigger(message) is True. Contains all actions the module is to perform", "0 ADMINS = 1 class ModuleInterface(object): \"\"\" The interface modules should inherit and", "\"\"\" pass def on_unload(self): \"\"\" Called when the module is unloaded by the", "if said message causes this module to trigger Default behavior is to trigger", "in self.triggers: return False return True def on_trigger(self, message): \"\"\" Called by the", "False return True def on_trigger(self, message): \"\"\" Called by the ModuleHandler when shouldTrigger(message)", "the module should be trigger-able by ANYONE, or only users on the admin", "trigger (PRIVMSG, ACTION, NOTICE). help - help text for the module. May be", "May be unicode or a function on the message object help(message), returning a", "for the module. modules with high priority trigger before ones with low priority", "0 def __init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def", "matches the accepted types and contains a matching command trigger @type message: hubbot.message.IRCMessage", "with low priority for any given message object. 
\"\"\" triggers = [] accepted_types", "trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type not in self.accepted_types: return False if", "1 class ModuleInterface(object): \"\"\" The interface modules should inherit and implement in order", "modules with high priority trigger before ones with low priority for any given", "= [\"PRIVMSG\"] help = \"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority =", "\"\"\" The interface modules should inherit and implement in order to function with", "__init__(self, bot): \"\"\" @type bot: hubbot.bot.Hubbot \"\"\" self.bot = bot def on_load(self): \"\"\"", "is to trigger on any message that matches the accepted types and contains", "function on the message object help(message), returning a unicode object. access_level - whether", "on_load(self): \"\"\" Called when the module is loaded by the ModuleHandler \"\"\" pass", "ANYONE, or only users on the admin list. priority - the priority for", "import Enum class ModuleAccessLevel(Enum): ANYONE = 0 ADMINS = 1 class ModuleInterface(object): \"\"\"", "implement in order to function with the ModuleHandler. triggers - command words that", "actions the module is to perform on the message object. @type message: hubbot.message.IRCMessage", "by the ModuleHandler \"\"\" pass def should_trigger(self, message): \"\"\" Called by the ModuleHandler", "\"No help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot):", "if message.command not in self.triggers: return False return True def on_trigger(self, message): \"\"\"", "the admin list. priority - the priority for the module. modules with high", "the module is to perform on the message object. @type message: hubbot.message.IRCMessage @return:", "module to trigger Default behavior is to trigger on any message that matches", "by the ModuleHandler when shouldTrigger(message) is True. Contains all actions the module is", "low priority for any given message object. 
\"\"\" triggers = [] accepted_types =", "- the priority for the module. modules with high priority trigger before ones", "- message types that can cause the module to trigger (PRIVMSG, ACTION, NOTICE).", "\"\"\" if message.type not in self.accepted_types: return False if message.command not in self.triggers:", "trigger before ones with low priority for any given message object. \"\"\" triggers", "for each incoming message, to see if said message causes this module to", "Contains all actions the module is to perform on the message object. @type", "to perform on the message object. @type message: hubbot.message.IRCMessage @return: hubbot.response.IRCResponse | None", "by ANYONE, or only users on the admin list. priority - the priority", "to trigger. accepted_types - message types that can cause the module to trigger", "\"\"\" self.bot = bot def on_load(self): \"\"\" Called when the module is loaded", "said message causes this module to trigger Default behavior is to trigger on", "if message.type not in self.accepted_types: return False if message.command not in self.triggers: return", "to see if said message causes this module to trigger Default behavior is", "ModuleHandler. triggers - command words that cause the module to trigger. accepted_types -", "module to trigger. accepted_types - message types that can cause the module to", "types and contains a matching command trigger @type message: hubbot.message.IRCMessage \"\"\" if message.type", "help defined yet.\" access_level = ModuleAccessLevel.ANYONE priority = 0 def __init__(self, bot): \"\"\"", "- command words that cause the module to trigger. accepted_types - message types", "to function with the ModuleHandler. triggers - command words that cause the module", "ANYONE = 0 ADMINS = 1 class ModuleInterface(object): \"\"\" The interface modules should", "module. May be unicode or a function on the message object help(message), returning", "\"\"\" Called by the ModuleHandler when shouldTrigger(message) is True. 
Contains all actions the", "The interface modules should inherit and implement in order to function with the" ]
[ "text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) #", "= 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new", "clears it if the two do not match each other # Then after", "of cell rows reach 3, reset number of cell row and create a", "0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20)", "a delay then either changes the text to a check mark # or", "pady=20) grid_frame = card_frame(game_window) # Frame where all cards will be contained grid_frame.pack(padx=10,", "the original window, count selected, and deck generated. Creates a new window with", "Button(grid_frame) # Pass the button widget itself and the random card class, so", "mark # or clears it if the two do not match each other", "cell_column_count = 0 for button in button_list: button.config( width=6, height=3, font=(\"arial\", 15) )", "count, deck): \"\"\" Takes in the original window, count selected, and deck generated.", "* 2 cell_row_count = 0 cell_column_count = 0 for button in button_list: button.config(", "second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error", "threading import time import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck):", "deck[j])) # Second of the card pair card_button_pair = Button(grid_frame) # Pass the", "loop that creates 2 cards per 1 count for i in range(0, count):", "contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked #", "bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def 
show_and_hide(btn, card):", "config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) *", "check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame", "= btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking)", "show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0", "btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start()", "the card pair card_button = Button(grid_frame) # Pass the button widget itself and", "number of cell row and create a new column cell_row_count = 0 cell_column_count", "button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: # if number", "random card class, so we can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn,", "btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops you can't do that!\") #", "a new column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise,", "card_frame def game_gui(window, count, deck): \"\"\" Takes in the original window, count selected,", "len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text", "20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where all cards will be", "btn_text = btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify() 
btn.config(text=card_text) checking.append(btn) if", "card pair card_button_pair = Button(grid_frame) # Pass the button widget itself and the", "can do a function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\")", "else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking", "First of the card pair card_button = Button(grid_frame) # Pass the button widget", "{times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\")", "itself and the random card class, so we can change text config card_button_pair.config(command=lambda", "thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You", "second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def show_and_hide(btn,", "column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create", "text to a check mark # or clears it if the two do", "btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) ==", "that will hold all button widgets checking = [] # List that will", "will hold all button widgets checking = [] # List that will hold", "def game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked # Sets a delay", "game_gui(window, count, deck): \"\"\" Takes in the original window, count selected, and deck", "the random card class, so we can change text config card_button.config(command=lambda btn=card_button, j=i:", "deck randomly button_list = [] # List that will hold all button widgets", 
"checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops", "of the card pair card_button_pair = Button(grid_frame) # Pass the button widget itself", "* import threading import time import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window,", "and the random card class, so we can change text config card_button.config(command=lambda btn=card_button,", "pair card_button = Button(grid_frame) # Pass the button widget itself and the random", "deck generated. Creates a new window with the 3 params \"\"\" game_window =", "most recent buttons player pressed times_checked = 0 check_label = Label(game_window, text=f\"You checked", "class, so we can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) #", "Creates a new window with the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck)", "will hold up to the 2 most recent buttons player pressed times_checked =", "1 count for i in range(0, count): # First of the card pair", "gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\" Takes in the original window,", "cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new row for", "font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where all cards", "threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already matched!\")", "of cell row and create a new column cell_row_count = 0 cell_column_count +=", "grid_frame = card_frame(game_window) # Frame where all cards will be contained grid_frame.pack(padx=10, pady=10)", "width=6, height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: # if number of", "generated. 
Creates a new window with the 3 params \"\"\" game_window = Toplevel(window)", "Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list = [] # List that will", "checking.clear() elif btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops you can't do", "= 0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20,", "per 1 count for i in range(0, count): # First of the card", "or clears it if the two do not match each other # Then", "text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the card pair", "15) ) if cell_row_count == CARD_PER_COLUMN: # if number of cell rows reach", "checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame", "show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text == \"\": card_text =", "widgets checking = [] # List that will hold up to the 2", "+= 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new row for the", "\"\"\" Takes in the original window, count selected, and deck generated. 
Creates a", "random card class, so we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn,", "nonlocal times_checked # Sets a delay then either changes the text to a", "= [] # List that will hold all button widgets checking = []", "1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new row for the card", "not match each other # Then after that checks if all boxes are", "button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0 cell_column_count = 0", "checking btn_text = btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn)", "randomly button_list = [] # List that will hold all button widgets checking", "Then after that checks if all boxes are checked, so it can do", "card_button_pair = Button(grid_frame) # Pass the button widget itself and the random card", "# Sets a delay then either changes the text to a check mark", "player pressed times_checked = 0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial", "List that will hold all button widgets checking = [] # List that", "button_list = [] # List that will hold all button widgets checking =", "Second of the card pair card_button_pair = Button(grid_frame) # Pass the button widget", "count selected, and deck generated. 
Creates a new window with the 3 params", "all boxes are checked, so it can do a function time.sleep(wait) try: times_checked", "times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\")", "check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over()", "button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new row for the card cell_row_count", "second.config(text=\"\") except TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text =", "button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: # if", "== second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An", "game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list = [] # List", "first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError:", "matched!\") else: print(\"Oops you can't do that!\") # A loop that creates 2", "up to the 2 most recent buttons player pressed times_checked = 0 check_label", "judge_delay(wait, first, second): nonlocal times_checked # Sets a delay then either changes the", "so it can do a function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked", "we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) 
random.shuffle(button_list)", "for button in button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count ==", "2 most recent buttons player pressed times_checked = 0 check_label = Label(game_window, text=f\"You", "the card pair card_button_pair = Button(grid_frame) # Pass the button widget itself and", "change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN =", "range(0, count): # First of the card pair card_button = Button(grid_frame) # Pass", "each other # Then after that checks if all boxes are checked, so", "CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0 cell_column_count = 0 for button", "with the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly", "checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops you", "card_frame(game_window) # Frame where all cards will be contained grid_frame.pack(padx=10, pady=10) def game_over():", "first, second): nonlocal times_checked # Sets a delay then either changes the text", "widget itself and the random card class, so we can change text config", "deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0 cell_column_count", "will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first, second): nonlocal", "checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else:", ") if cell_row_count == CARD_PER_COLUMN: # if number of cell rows reach 3,", "if btn_text == \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if 
len(checking) == 2:", "cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a", "to a check mark # or clears it if the two do not", "Otherwise, just create a new row for the card cell_row_count += 1 grid_frame.pack()", "def judge_delay(wait, first, second): nonlocal times_checked # Sets a delay then either changes", "List that will hold up to the 2 most recent buttons player pressed", "of the card pair card_button = Button(grid_frame) # Pass the button widget itself", "we can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of", "if cell_row_count == CARD_PER_COLUMN: # if number of cell rows reach 3, reset", "if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif", "reset number of cell row and create a new column cell_row_count = 0", "selected, and deck generated. Creates a new window with the 3 params \"\"\"", "card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2", "the two do not match each other # Then after that checks if", "other # Then after that checks if all boxes are checked, so it", "def show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text == \"\": card_text", "Sets a delay then either changes the text to a check mark #", "\"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list = [] #", "can't do that!\") # A loop that creates 2 cards per 1 count", "window, count selected, and deck generated. 
Creates a new window with the 3", "button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0 cell_column_count =", "thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops you can't", "Pass the button widget itself and the random card class, so we can", "# List that will hold up to the 2 most recent buttons player", "window with the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck", "btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count", "= threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already", "j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count =", "and create a new column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count)", "= card_frame(game_window) # Frame where all cards will be contained grid_frame.pack(padx=10, pady=10) def", "check mark # or clears it if the two do not match each", "button widget itself and the random card class, so we can change text", "# Second of the card pair card_button_pair = Button(grid_frame) # Pass the button", "# Pass the button widget itself and the random card class, so we", "game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal", "print(\"You already matched!\") else: print(\"Oops you can't do that!\") # A loop that", "times_checked # Sets a delay then either changes the text to a check", "random.shuffle(deck) # shuffles deck 
randomly button_list = [] # List that will hold", "do a function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if", "1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\")", "times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\")", "all cards will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first,", "where all cards will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait,", "try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\",", "first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\")", "so we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair)", "show_and_hide(btn, deck[j])) # Second of the card pair card_button_pair = Button(grid_frame) # Pass", "occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text == \"\":", "change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the card", "error occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text ==", "0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just create a new row", "card pair card_button = Button(grid_frame) # Pass the button widget 
itself and the", "nonlocal checking btn_text = btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify() btn.config(text=card_text)", "after that checks if all boxes are checked, so it can do a", "time import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\" Takes", "for i in range(0, count): # First of the card pair card_button =", "a check mark # or clears it if the two do not match", "count): # First of the card pair card_button = Button(grid_frame) # Pass the", "= 0 cell_column_count = 0 for button in button_list: button.config( width=6, height=3, font=(\"arial\",", "btn_text == \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread", "hold up to the 2 most recent buttons player pressed times_checked = 0", "== \"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread =", "if all boxes are checked, so it can do a function time.sleep(wait) try:", "cell_row_count == CARD_PER_COLUMN: # if number of cell rows reach 3, reset number", "and deck generated. 
Creates a new window with the 3 params \"\"\" game_window", "TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if", "new window with the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles", "reach 3, reset number of cell row and create a new column cell_row_count", "== CARD_PER_COLUMN: # if number of cell rows reach 3, reset number of", "times_checked = 0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20))", "to the 2 most recent buttons player pressed times_checked = 0 check_label =", "in button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: #", "0 cell_column_count = 0 for button in button_list: button.config( width=6, height=3, font=(\"arial\", 15)", "times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where all", "# Otherwise, just create a new row for the card cell_row_count += 1", "else: print(\"Oops you can't do that!\") # A loop that creates 2 cards", "params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list = []", "card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5,", "CARD_PER_COLUMN: # if number of cell rows reach 3, reset number of cell", "creates 2 cards per 1 count for i in range(0, count): # First", "pass def judge_delay(wait, first, second): nonlocal times_checked # Sets a delay then either", "the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list", "{times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where", "cell row and create a new column cell_row_count = 0 cell_column_count += 1", "import 
threading import time import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count,", "btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the card pair card_button_pair = Button(grid_frame)", "<filename>gui_res/game_window.py from tkinter import * import threading import time import random from gui_res.gui_frames.card_frame", "from tkinter import * import threading import time import random from gui_res.gui_frames.card_frame import", "card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1]))", "== 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text ==", "the 2 most recent buttons player pressed times_checked = 0 check_label = Label(game_window,", "from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\" Takes in the original", "pressed times_checked = 0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\",", "Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window)", "== \"✔\": print(\"You already matched!\") else: print(\"Oops you can't do that!\") # A", "card class, so we can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j]))", "= Button(grid_frame) # Pass the button widget itself and the random card class,", "random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\" Takes in the", "checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear()", "checked, so it can do a function time.sleep(wait) try: times_checked += 1 
check_label.config(text=f\"You", "= Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list = [] # List that", "do that!\") # A loop that creates 2 cards per 1 count for", "Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where all cards will", "# or clears it if the two do not match each other #", "that creates 2 cards per 1 count for i in range(0, count): #", "0 for button in button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count", "grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked # Sets", "bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def", "if number of cell rows reach 3, reset number of cell row and", "can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the", "can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN", "new column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) # Otherwise, just", "# if number of cell rows reach 3, reset number of cell row", "round(count/4) * 2 cell_row_count = 0 cell_column_count = 0 for button in button_list:", "time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"):", "the text to a check mark # or clears it if the two", "A loop that creates 2 cards per 1 count for i in range(0,", "deck): \"\"\" Takes in the original window, count selected, and deck generated. 
Creates", "a function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\")", "cell rows reach 3, reset number of cell row and create a new", "args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\": print(\"You already matched!\") else:", "# A loop that creates 2 cards per 1 count for i in", "height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: # if number of cell", "\"\": card_text = card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay,", "# List that will hold all button widgets checking = [] # List", "= card.stringify() btn.config(text=card_text) checking.append(btn) if len(checking) == 2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0],", "rows reach 3, reset number of cell row and create a new column", "card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the card pair card_button_pair =", "all button widgets checking = [] # List that will hold up to", "= round(count/4) * 2 cell_row_count = 0 cell_column_count = 0 for button in", "two do not match each other # Then after that checks if all", "import time import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\"", "then either changes the text to a check mark # or clears it", "first.config(text=\"\") second.config(text=\"\") except TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text", "cards per 1 count for i in range(0, count): # First of the", "a new window with the 3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) #", "card class, so we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j]))", "if the two do not match each other # Then after that checks", 
"\"✔\": print(\"You already matched!\") else: print(\"Oops you can't do that!\") # A loop", "card_button = Button(grid_frame) # Pass the button widget itself and the random card", "be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked", "either changes the text to a check mark # or clears it if", "# shuffles deck randomly button_list = [] # List that will hold all", "count for i in range(0, count): # First of the card pair card_button", "3, reset number of cell row and create a new column cell_row_count =", "already matched!\") else: print(\"Oops you can't do that!\") # A loop that creates", "3 params \"\"\" game_window = Toplevel(window) random.shuffle(deck) # shuffles deck randomly button_list =", "column=cell_column_count) # Otherwise, just create a new row for the card cell_row_count +=", "shuffles deck randomly button_list = [] # List that will hold all button", "card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text == \"\": card_text = card.stringify()", "config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second of the card pair card_button_pair", "do not match each other # Then after that checks if all boxes", "it if the two do not match each other # Then after that", "2: thread = threading.Thread(target=judge_delay, args=(0.5, checking[0], checking[1])) thread.start() checking.clear() elif btn_text == \"✔\":", "in the original window, count selected, and deck generated. 
Creates a new window", "pair card_button_pair = Button(grid_frame) # Pass the button widget itself and the random", "checking = [] # List that will hold up to the 2 most", "font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN: # if number of cell rows", "tkinter import * import threading import time import random from gui_res.gui_frames.card_frame import card_frame", "recent buttons player pressed times_checked = 0 check_label = Label(game_window, text=f\"You checked {times_checked}", "def game_gui(window, count, deck): \"\"\" Takes in the original window, count selected, and", "j=i: show_and_hide(btn, deck[j])) # Second of the card pair card_button_pair = Button(grid_frame) #", "print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking btn_text = btn.cget(\"text\") if btn_text", "2 cards per 1 count for i in range(0, count): # First of", "Takes in the original window, count selected, and deck generated. Creates a new", "original window, count selected, and deck generated. 
Creates a new window with the", "game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked # Sets a delay then", "it can do a function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked}", "create a new column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count, column=cell_column_count) #", "function time.sleep(wait) try: times_checked += 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") ==", "that checks if all boxes are checked, so it can do a function", "# Frame where all cards will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass", "text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button) button_list.append(card_button_pair) random.shuffle(button_list) CARD_PER_COLUMN = round(count/4)", "that!\") # A loop that creates 2 cards per 1 count for i", "you can't do that!\") # A loop that creates 2 cards per 1", "hold all button widgets checking = [] # List that will hold up", "= 0 for button in button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if", "= Label(game_window, text=f\"You checked {times_checked} times.\", font=(\"Arial Black\", 20)) check_label.pack(padx=20, pady=20) grid_frame =", "are checked, so it can do a function time.sleep(wait) try: times_checked += 1", "[] # List that will hold all button widgets checking = [] #", "checks if all boxes are checked, so it can do a function time.sleep(wait)", "so we can change text config card_button.config(command=lambda btn=card_button, j=i: show_and_hide(btn, deck[j])) # Second", "and the random card class, so we can change text config card_button_pair.config(command=lambda btn=card_button_pair,", "the button widget itself and the random card class, so we can change", "except TclError: print(\"An error occurred...\") def show_and_hide(btn, card): nonlocal checking 
btn_text = btn.cget(\"text\")", "buttons player pressed times_checked = 0 check_label = Label(game_window, text=f\"You checked {times_checked} times.\",", "in range(0, count): # First of the card pair card_button = Button(grid_frame) #", "check_label.pack(padx=20, pady=20) grid_frame = card_frame(game_window) # Frame where all cards will be contained", "button in button_list: button.config( width=6, height=3, font=(\"arial\", 15) ) if cell_row_count == CARD_PER_COLUMN:", "Frame where all cards will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def", "# Then after that checks if all boxes are checked, so it can", "that will hold up to the 2 most recent buttons player pressed times_checked", "pady=10) def game_over(): pass def judge_delay(wait, first, second): nonlocal times_checked # Sets a", "import card_frame def game_gui(window, count, deck): \"\"\" Takes in the original window, count", "cards will be contained grid_frame.pack(padx=10, pady=10) def game_over(): pass def judge_delay(wait, first, second):", "elif btn_text == \"✔\": print(\"You already matched!\") else: print(\"Oops you can't do that!\")", "changes the text to a check mark # or clears it if the", "[] # List that will hold up to the 2 most recent buttons", "if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\", bg=\"green\") game_over() else: first.config(text=\"\") second.config(text=\"\") except", "the random card class, so we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i:", "print(\"Oops you can't do that!\") # A loop that creates 2 cards per", "boxes are checked, so it can do a function time.sleep(wait) try: times_checked +=", "import random from gui_res.gui_frames.card_frame import card_frame def game_gui(window, count, deck): \"\"\" Takes in", "random.shuffle(button_list) CARD_PER_COLUMN = round(count/4) * 2 cell_row_count = 0 cell_column_count = 0 for", "cell_row_count = 
0 cell_column_count = 0 for button in button_list: button.config( width=6, height=3,", "class, so we can change text config card_button_pair.config(command=lambda btn=card_button_pair, j=i: show_and_hide(btn, deck[j])) button_list.append(card_button)", "row and create a new column cell_row_count = 0 cell_column_count += 1 button.grid(row=cell_row_count,", "itself and the random card class, so we can change text config card_button.config(command=lambda", "2 cell_row_count = 0 cell_column_count = 0 for button in button_list: button.config( width=6,", "= [] # List that will hold up to the 2 most recent", "+= 1 check_label.config(text=f\"You checked {times_checked} times.\") if first.cget(\"text\") == second.cget(\"text\"): first.config(text=\"✔\", bg=\"green\") second.config(text=\"✔\",", "# First of the card pair card_button = Button(grid_frame) # Pass the button", "match each other # Then after that checks if all boxes are checked,", "number of cell rows reach 3, reset number of cell row and create", "delay then either changes the text to a check mark # or clears", "import * import threading import time import random from gui_res.gui_frames.card_frame import card_frame def", "i in range(0, count): # First of the card pair card_button = Button(grid_frame)", "second): nonlocal times_checked # Sets a delay then either changes the text to", "button widgets checking = [] # List that will hold up to the" ]
[ "subprocess.defname self.color = color self.protover = protover self.readyMoves = False self.readyOptions = False", "self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self): return self.mode in (ANALYZING, INVERSE_ANALYZING)", "pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = {", "= 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST,", "()), } # Setting engine options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self,", "color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname self.color =", "self.protover = protover self.readyMoves = False self.readyOptions = False self.connected = True self.mode", "NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST,", "self.readyOptions = False self.connected = True self.mode = NORMAL self.analyzing_paused = False def", "import Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine):", "{ \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine", "self.defname = subprocess.defname self.color = color self.protover = protover self.readyMoves = False self.readyOptions", "= False self.connected = True self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self):", "import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\":", "engine options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess", "ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": 
(GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), }", "(GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options def __init__(self, subprocess, color, protover,", "self.engine = subprocess self.defname = subprocess.defname self.color = color self.protover = protover self.readyMoves", "False self.connected = True self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self): return", "self.connected = True self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self): return self.mode", "None, ()), } # Setting engine options def __init__(self, subprocess, color, protover, md5):", "md5) self.engine = subprocess self.defname = subprocess.defname self.color = color self.protover = protover", "protover self.readyMoves = False self.readyOptions = False self.connected = True self.mode = NORMAL", "()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options def __init__(self, subprocess,", "subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname self.color", "subprocess self.defname = subprocess.defname self.color = color self.protover = protover self.readyMoves = False", "= subprocess.defname self.color = color self.protover = protover self.readyMoves = False self.readyOptions =", "self.color = color self.protover = protover self.readyMoves = False self.readyOptions = False self.connected", "True self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self): return self.mode in (ANALYZING,", "TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\":", "= protover self.readyMoves = False self.readyOptions = False self.connected = True self.mode =", "class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": 
(GObject.SignalFlags.RUN_FIRST, None, ()),", "False self.readyOptions = False self.connected = True self.mode = NORMAL self.analyzing_paused = False", "pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class", "(GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options def", "color self.protover = protover self.readyMoves = False self.readyOptions = False self.connected = True", "\"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options def __init__(self, subprocess, color,", "self.readyMoves = False self.readyOptions = False self.connected = True self.mode = NORMAL self.analyzing_paused", "md5): Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname self.color = color self.protover", "def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname =", "60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None,", "= { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting", "INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()),", "= True self.mode = NORMAL self.analyzing_paused = False def isAnalyzing(self): return self.mode in", "__gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } #", "gi.repository import GObject from pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING", "from pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING, 
INVERSE_ANALYZING TIME_OUT_SECOND = 60", "# Setting engine options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine", "Setting engine options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine =", "protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname self.color = color", "Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname self.color = color self.protover =", "__init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname = subprocess.defname", "None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options def __init__(self,", "from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ =", "= subprocess self.defname = subprocess.defname self.color = color self.protover = protover self.readyMoves =", "\"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None, ()), \"readyForMoves\": (GObject.SignalFlags.RUN_FIRST, None, ()), } # Setting engine options", "} # Setting engine options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5)", "options def __init__(self, subprocess, color, protover, md5): Engine.__init__(self, md5) self.engine = subprocess self.defname", "= False self.readyOptions = False self.connected = True self.mode = NORMAL self.analyzing_paused =", "= color self.protover = protover self.readyMoves = False self.readyOptions = False self.connected =", "Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__", "ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND = 60 class ProtocolEngine(Engine): __gsignals__ = { \"readyForOptions\": (GObject.SignalFlags.RUN_FIRST, None,", "from gi.repository import GObject from 
pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING,", "import GObject from pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND", "GObject from pychess.Players.Engine import Engine from pychess.Utils.const import NORMAL, ANALYZING, INVERSE_ANALYZING TIME_OUT_SECOND =" ]
[ "bits: i = 0 new_bits.append(\"\") while i < len(b): if burst_probability > numpy.random.random():", "of bits \"\"\" new_bits = [] for b in bits: if probability >", "probability for an error[0-1] RETURN: a list of bits \"\"\" new_bits = []", "of bits, the probability for an error[0-1] RETURN: a list of bits \"\"\"", "bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len bits long bitstring \"\"\" bits", "new_bits = \"\" i = 0 while i < len(bits): if burst_probability >", "the probability to leave the bursterror[0-1] Return: String of bits with added burst", "str((int(b) + 1) % 2) # turn 0 to 1 and 1 to", "b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip bits", "new_bit = \"\" for i in range(len(b)): if probability > numpy.random.random(): # roll", "probability to leave the bursterror[0-1] Return: list of bits with added burst erorrs", "if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1) %", "ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i]", "to 0 randomly else: new_bits += str(bits[i]) currently_bursting = False i += 1", "0 to 1 and 1 to 0 else: new_bit += str(b[i]) new_bits.append(new_bit) return", "numpy.random.random(): # roll random numbers currently_bursting = True while currently_bursting and i <", "# print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9) # print (c.compare_and_highlight_differences(rb,rr))", "= [] for b in bits: new_bit = \"\" for i in range(len(b)):", "new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False i += 1 else: new_bits[len(new_bits)", "__init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len bits", "hold usefull funktions to simulate noise in a channel\"\"\" 
def __init__(self): return #", "print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9) # print (c.compare_and_highlight_differences(rb,rr)) #", "new_bits += b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply", "list of bits, the probability for an error[0-1] Return: a string full of", "def randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip bits with the given", "b ): # stop when bitstream ends (simulate one bursterror and adjust i)", "# turn 0 to 1 and 1 to 0 else: new_bits += b", "in bits: if probability > numpy.random.random(): # roll random numbers new_bits.append((b + 1)", "probability ARGS: a String of bits, the probability for an error[0-1], the probability", "< len(bits): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True", "> numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 ) # turn 0 to", "[] for b in bits: if probability > numpy.random.random(): # roll random numbers", "channel_noise_simulator: \"\"\"Class to hold usefull funktions to simulate noise in a channel\"\"\" def", "bits = [] for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self,", "bursterror[0-1] Return: String of bits with added burst error \"\"\" new_bits = []", "+ 1) % 2 ) # turn 0 to 1 and 1 to", "to leave the bursterror[0-1] Return: String of bits with added burst erorrs \"\"\"", "bits1, bits2): \"\"\"compare two bitlists and higlight the differences\"\"\" differences = [] if", "+ 1) % 2) ) # turn 0 to 1 and 1 to", "of bits, the probability for an error[0-1] Return: a string full of bits", "function to simply flip bits with the given probability ARGS: a list of", "numbers new_bits += str((int(b) + 1) % 2) # turn 0 to 1", "(simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] +", 
"two bitlists and higlight the differences\"\"\" differences = [] if len(bits1) != len(bits2):", "burst erorrs \"\"\" new_bits = \"\" i = 0 while i < len(bits):", "str((int(b[i]) + 1) % 2) # turn 0 to 1 and 1 to", "\"\"\"compare two bitlists and higlight the differences\"\"\" differences = [] if len(bits1) !=", "a list of bits, the probability for an error[0-1] RETURN: a list of", "and 1 to 0 randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting =", "+= 1 else: new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string( self, bits,", "bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits with the given", "randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with", "random numbers currently_bursting = True while currently_bursting and i < len( b ):", "1 else: new_bits[len(new_bits) - 1] += str(b[i]) i += 1 return new_bits def", "to leave the bursterror[0-1] Return: list of bits with added burst erorrs \"\"\"", "len bits long bitstring \"\"\" bits = [] for i in range(len): bits.append(numpy.random.randint(0,", "1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting = False i += 1 else:", "Return: String of bits with added burst erorrs \"\"\" new_bits = \"\" i", "different lengths detected. may result in higher errorrate\") min_length = min(len(bits1), len(bits2)) for", "> numpy.random.random(): # roll random numbers currently_bursting = True while currently_bursting and i", "b in bits: if probability > numpy.random.random(): # roll random numbers new_bits +=", "str( ((int(bits[i]) + 1) % 2) ) # turn 0 to 1 and", "b in bits: if probability > numpy.random.random(): # roll random numbers new_bits.append((b +", "detected. 
may result in higher errorrate\") min_length = min(len(bits1), len(bits2)) for i in", "bits \"\"\" new_bits = \"\" for b in bits: if probability > numpy.random.random():", "1) % 2 ) # turn 0 to 1 and 1 to 0", "for an error[0-1] RETURN: a list of bits \"\"\" new_bits = [] for", "return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1]))", "i < len(b): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting =", "a list of bits, the probability for an error[0-1], the probability to leave", "if probability > numpy.random.random(): # roll random numbers new_bits += str((int(b) + 1)", ") # turn 0 to 1 and 1 to 0 randomly else: new_bits.append(bits[i])", "% 2) # turn 0 to 1 and 1 to 0 else: new_bit", "probability > numpy.random.random(): # roll random numbers new_bit += str((int(b[i]) + 1) %", "bursterror[0-1] Return: String of bits with added burst erorrs \"\"\" new_bits = \"\"", "True while currently_bursting and i < len( b ): # stop when bitstream", "str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print", "probability ARGS: a list of bits, the probability for an error[0-1] RETURN: a", "[] for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create", "i < len(bits): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting =", "print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9) #", "String of bits, the probability for an 
error[0-1], the probability to leave the", "flip bits with the given probability ARGS: a String of bits, the probability", "the bursterror[0-1] Return: String of bits with added burst error \"\"\" new_bits =", "new_bits.append( (bits[i] + 1) % 2 ) # turn 0 to 1 and", "b in bits: i = 0 new_bits.append(\"\") while i < len(b): if burst_probability", "> numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1) % 2) )", "# ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and higlight the", "% 2) # turn 0 to 1 and 1 to 0 else: new_bits", "def randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip bits with the given", "# _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip bits", "\"\"\" new_bits = [] for b in bits: new_bit = \"\" for i", "bits: new_bit = \"\" for i in range(len(b)): if probability > numpy.random.random(): #", "an error[0-1], the probability to leave the bursterror[0-1] Return: String of bits with", "list of bits with added burst erorrs \"\"\" new_bits = [] i =", ") # turn 0 to 1 and 1 to 0 randomly else: new_bits", "False i += 1 else: new_bits[len(new_bits) - 1] += str(b[i]) i += 1", "to 0 randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False i", "string full of bits \"\"\" new_bits = \"\" for b in bits: if", "0 randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False i +=", "\"\"\"A function to simply flip bits with the given probability ARGS: a list", "\"\" for i in range(len(b)): if probability > numpy.random.random(): # roll random numbers", "len): \"\"\"create a random len bits long bitstring \"\"\" bits = [] for", "bits with added burst erorrs \"\"\" new_bits = [] i = 0 while", "def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to 
simply flip bits", "1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists", "random numbers new_bit += str((int(b[i]) + 1) % 2) # turn 0 to", "differences = [] if len(bits1) != len(bits2): print(\"waning, different lengths detected. may result", "randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits with", "\"\"\"A function to simply flip bits with the given probability ARGS: a String", "with the given probability ARGS: a String of bits, the probability for an", "currently_bursting = False i += 1 else: new_bits += str(bits[i]) i += 1", "in range(len): bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self,", "def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len", "the probability for an error[0-1], the probability to leave the bursterror[0-1] Return: String", "= 0 new_bits.append(\"\") while i < len(b): if burst_probability > numpy.random.random(): # roll", "Return: list of bits with added burst erorrs \"\"\" new_bits = [] i", "randomly else: new_bits += str(bits[i]) currently_bursting = False i += 1 else: new_bits", "< len( bits ): # stop when bitstream ends (simulate one bursterror and", "else: new_bits[len(new_bits) - 1] += str(b[i]) i += 1 return new_bits def randomise_bits_burst_list(", "(simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits += str(", "for an error[0-1], the probability to leave the bursterror[0-1] Return: list of bits", "new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip", "new_bits[len(new_bits) - 1] += str(b[i]) i += 1 return new_bits def randomise_bits_burst_list( self,", "of bits 
with added burst erorrs \"\"\" new_bits = [] i = 0", "= 0 while i < len(bits): if burst_probability > numpy.random.random(): # roll random", "turn 0 to 1 and 1 to 0 else: new_bits.append(b) return new_bits def", "< len( b ): # stop when bitstream ends (simulate one bursterror and", "numpy.random.random(): # roll random numbers new_bit += str((int(b[i]) + 1) % 2) #", "the probability for an error[0-1] RETURN: a list of bits \"\"\" new_bits =", "error[0-1] Return: a string full of bits \"\"\" new_bits = \"\" for b", "error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 ) # turn 0", "and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i])", "bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) +", "% 2) # turn 0 to 1 and 1 to 0 else: new_bits.append(b)", "simply flip bits with the given probability ARGS: a String of bits, the", "# print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9)", "errorrate\") min_length = min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if bits1[i] !=", "len(bits2)) for i in range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences", "new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and higlight", "probability to leave the bursterror[0-1] Return: String of bits with added burst erorrs", "def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits", "return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply", ") # turn 0 to 1 
and 1 to 0 randomly else: new_bits[len(new_bits)", "added burst erorrs \"\"\" new_bits = [] i = 0 while i <", "2 ) # turn 0 to 1 and 1 to 0 randomly else:", "to 1 and 1 to 0 randomly else: new_bits += str(bits[i]) currently_bursting =", "i += 1 else: new_bits[len(new_bits) - 1] += str(b[i]) i += 1 return", "= True while currently_bursting and i < len( bits ): # stop when", "1 and 1 to 0 randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting", "+= str(bits[i]) currently_bursting = False i += 1 else: new_bits += str(bits[i]) i", "result in higher errorrate\") min_length = min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1", "= True while currently_bursting and i < len( b ): # stop when", "- 1] += str( ((int(b[i]) + 1) % 2) ) # turn 0", "the probability for an error[0-1], the probability to leave the bursterror[0-1] Return: list", "bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip bits with the", "+= str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A", "0 randomly else: new_bits += str(bits[i]) currently_bursting = False i += 1 else:", "bursterror[0-1] Return: list of bits with added burst erorrs \"\"\" new_bits = []", "currently_bursting and i < len( bits ): # stop when bitstream ends (simulate", "for an error[0-1], the probability to leave the bursterror[0-1] Return: String of bits", "if probability > numpy.random.random(): # roll random numbers new_bit += str((int(b[i]) + 1)", "roll random numbers currently_bursting = True while currently_bursting and i < len( b", "new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1) % 2) ) # turn", "bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip", "an error[0-1] RETURN: a list of bits \"\"\" new_bits = [] for b", "probability for an 
error[0-1] Return: a string full of bits \"\"\" new_bits =", "print(\"waning, different lengths detected. may result in higher errorrate\") min_length = min(len(bits1), len(bits2))", "while i < len(bits): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting", "range(len(b)): if probability > numpy.random.random(): # roll random numbers new_bit += str((int(b[i]) +", "for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create a", "if len(bits1) != len(bits2): print(\"waning, different lengths detected. may result in higher errorrate\")", "\"\"\"Class to hold usefull funktions to simulate noise in a channel\"\"\" def __init__(self):", "self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits with the", "differences\"\"\" differences = [] if len(bits1) != len(bits2): print(\"waning, different lengths detected. may", "len(bits2): print(\"waning, different lengths detected. 
may result in higher errorrate\") min_length = min(len(bits1),", "probability to leave the bursterror[0-1] Return: String of bits with added burst error", "random numbers new_bits.append((b + 1) % 2) # turn 0 to 1 and", "turn 0 to 1 and 1 to 0 else: new_bits += b return", "- 1] += str(b[i]) currently_bursting = False i += 1 else: new_bits[len(new_bits) -", "1] += str( ((int(b[i]) + 1) % 2) ) # turn 0 to", "- 1] += str(b[i]) i += 1 return new_bits def randomise_bits_burst_list( self, bits,", "to simply flip bits with the given probability ARGS: a String of bits,", "i) if error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) % 2)", "the bursterror[0-1] Return: String of bits with added burst erorrs \"\"\" new_bits =", "i += 1 else: new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string( self,", "(bits[i] + 1) % 2 ) # turn 0 to 1 and 1", "leave the bursterror[0-1] Return: String of bits with added burst error \"\"\" new_bits", "higher errorrate\") min_length = min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if bits1[i]", "probability ARGS: a list of bits, the probability for an error[0-1], the probability", "# turn 0 to 1 and 1 to 0 randomly else: new_bits[len(new_bits) -", "2) # turn 0 to 1 and 1 to 0 else: new_bits +=", "randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip bits with the given probability", "= False i += 1 else: new_bits[len(new_bits) - 1] += str(b[i]) i +=", "to leave the bursterror[0-1] Return: String of bits with added burst error \"\"\"", "higlight the differences\"\"\" differences = [] if len(bits1) != len(bits2): print(\"waning, different lengths", "the bursterror[0-1] Return: list of bits with added burst erorrs \"\"\" new_bits =", "\"\"\" new_bits = [] for b in bits: if probability > numpy.random.random(): #", "(c.randomise_bits_string(\"1101110\",0.5)) # print 
(c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr =", "the given probability ARGS: a list of bits, the probability for an error[0-1]", "if bits1[i] != bits2[i] else 0) print(\"Differences found: \" + str(differences.count(True))) return differences", "def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and higlight the differences\"\"\" differences =", "new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip bits", "numbers new_bit += str((int(b[i]) + 1) % 2) # turn 0 to 1", "+= str( ((int(b[i]) + 1) % 2) ) # turn 0 to 1", "list of bits \"\"\" new_bits = [] for b in bits: new_bit =", "erorrs \"\"\" new_bits = [] i = 0 while i < len(bits): if", "bits: if probability > numpy.random.random(): # roll random numbers new_bits.append((b + 1) %", "bits, the probability for an error[0-1], the probability to leave the bursterror[0-1] Return:", "with added burst erorrs \"\"\" new_bits = \"\" i = 0 while i", "\"\"\" bits = \"\" for i in range(len): bits += str(numpy.random.randint(0, 2)) return", "+= 1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function", "while currently_bursting and i < len( b ): # stop when bitstream ends", "bits, the probability for an error[0-1] Return: a string full of bits \"\"\"", "random numbers new_bits += str((int(b) + 1) % 2) # turn 0 to", "list of bits, the probability for an error[0-1] RETURN: a list of bits", "str(b[i]) currently_bursting = False i += 1 else: new_bits[len(new_bits) - 1] += str(b[i])", "in range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences found: \" +", "error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1) % 2)", "and 1 to 0 randomly else: 
new_bits.append(bits[i]) currently_bursting = False i += 1", "randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False i += 1", "to 0 randomly else: new_bits.append(bits[i]) currently_bursting = False i += 1 else: new_bits.append(bits[i])", "a list of bits \"\"\" new_bits = [] for b in bits: new_bit", "added burst erorrs \"\"\" new_bits = \"\" i = 0 while i <", "else: new_bits += b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to", "probability ARGS: a list of bits, the probability for an error[0-1] Return: a", "and i < len( bits ): # stop when bitstream ends (simulate one", "given probability ARGS: a list of bits, the probability for an error[0-1], the", "0 to 1 and 1 to 0 randomly else: new_bits += str(bits[i]) currently_bursting", "new_bits = [] for b in bits: new_bit = \"\" for i in", "the given probability ARGS: a list of bits, the probability for an error[0-1],", "2) # turn 0 to 1 and 1 to 0 else: new_bit +=", "turn 0 to 1 and 1 to 0 randomly else: new_bits[len(new_bits) - 1]", "i in range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences found: \"", "\"\" for b in bits: if probability > numpy.random.random(): # roll random numbers", "> numpy.random.random(): # roll random numbers new_bit += str((int(b[i]) + 1) % 2)", "bits long bitstring \"\"\" bits = [] for i in range(len): bits.append(numpy.random.randint(0, 2))", "to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to", "if error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) % 2) )", "the probability to leave the bursterror[0-1] Return: list of bits with added burst", "to 0 else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits,", "burst erorrs \"\"\" new_bits = [] i = 0 while i < len(bits):", "False i += 1 else: new_bits += str(bits[i]) i += 1 
return new_bits", "# turn 0 to 1 and 1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting", "new_bits += str(bits[i]) i += 1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self,", "for b in bits: if probability > numpy.random.random(): # roll random numbers new_bits", "for i in range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences found:", "roll random numbers new_bits += str((int(b) + 1) % 2) # turn 0", "error \"\"\" new_bits = [] currently_bursting = False for b in bits: i", "= [] if len(bits1) != len(bits2): print(\"waning, different lengths detected. may result in", "random len bits long string \"\"\" bits = \"\" for i in range(len):", "burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True while currently_bursting and", "+= str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A", "randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with", "\"\"\" new_bits = \"\" for b in bits: if probability > numpy.random.random(): #", "function to simply flip bits with the given probability ARGS: a String of", "for an error[0-1] Return: a string full of bits \"\"\" new_bits = \"\"", "= \"\" for i in range(len(b)): if probability > numpy.random.random(): # roll random", "% 2 ) # turn 0 to 1 and 1 to 0 randomly", "numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 ) # turn 0 to 1", "RETURN: a list of bits \"\"\" new_bits = [] for b in bits:", "len(bits1) != len(bits2): print(\"waning, different lengths detected. 
may result in higher errorrate\") min_length", "a string full of bits \"\"\" new_bits = \"\" for b in bits:", "1 to 0 randomly else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False", "String of bits with added burst error \"\"\" new_bits = [] currently_bursting =", "probability > numpy.random.random(): # roll random numbers new_bits.append((b + 1) % 2) #", "a random len bits long bitstring \"\"\" bits = [] for i in", "numbers currently_bursting = True while currently_bursting and i < len( b ): #", "adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) +", "len( b ): # stop when bitstream ends (simulate one bursterror and adjust", "and 1 to 0 randomly else: new_bits += str(bits[i]) currently_bursting = False i", "% 2) ) # turn 0 to 1 and 1 to 0 randomly", "_____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to simply flip bits with", "bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with the given", "and 1 to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A", "new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip", "range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create a random len bits", "bits def create_random_bits_string(self, len): \"\"\"create a random len bits long string \"\"\" bits", "# turn 0 to 1 and 1 to 0 randomly else: new_bits +=", "1 and 1 to 0 else: new_bits += b return new_bits def randomise_bits_string_list(self,", "while currently_bursting and i < len( bits ): # stop when bitstream ends", "differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences found: \" + str(differences.count(True))) return", "turn 0 to 1 and 1 to 0 randomly else: new_bits += str(bits[i])", "# 
c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print", "+= str((int(b[i]) + 1) % 2) # turn 0 to 1 and 1", "str(bits[i]) i += 1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2):", "new_bits = [] currently_bursting = False for b in bits: i = 0", "return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply", "in range(len(b)): if probability > numpy.random.random(): # roll random numbers new_bit += str((int(b[i])", "and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) % 2", "lengths detected. may result in higher errorrate\") min_length = min(len(bits1), len(bits2)) for i", "1 and 1 to 0 randomly else: new_bits += str(bits[i]) currently_bursting = False", "i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1)", "= \"\" i = 0 while i < len(bits): if burst_probability > numpy.random.random():", "1 to 0 else: new_bits += b return new_bits def randomise_bits_string_list(self, bits, probability):", "in higher errorrate\") min_length = min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if", "\"\" i = 0 while i < len(bits): if burst_probability > numpy.random.random(): #", "str( ((int(b[i]) + 1) % 2) ) # turn 0 to 1 and", "0 while i < len(bits): if burst_probability > numpy.random.random(): # roll random numbers", "else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip", "bits with the given probability ARGS: a String of bits, the probability for", "the differences\"\"\" differences = [] if len(bits1) != len(bits2): print(\"waning, different lengths 
detected.", "True while currently_bursting and i < len( bits ): # stop when bitstream", "): # stop when bitstream ends (simulate one bursterror and adjust i) if", "error[0-1], the probability to leave the bursterror[0-1] Return: list of bits with added", "2) ) # turn 0 to 1 and 1 to 0 randomly else:", "found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print", "new_bits.append(bits[i]) currently_bursting = False i += 1 else: new_bits.append(bits[i]) i += 1 return", "new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip bits with the", "1 and 1 to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability):", "numpy.random.random(): # roll random numbers new_bits += str((int(b) + 1) % 2) #", "Return: String of bits with added burst error \"\"\" new_bits = [] currently_bursting", "# _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len bits long bitstring", "def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits", "((int(bits[i]) + 1) % 2) ) # turn 0 to 1 and 1", "((int(b[i]) + 1) % 2) ) # turn 0 to 1 and 1", "randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip bits with the given probability", "i = 0 new_bits.append(\"\") while i < len(b): if burst_probability > numpy.random.random(): #", "currently_bursting and i < len( b ): # stop when bitstream ends (simulate", "1] += str(b[i]) i += 1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability,", "+ str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) #", "string \"\"\" bits = \"\" for i in range(len): bits += 
str(numpy.random.randint(0, 2))", "i < len( bits ): # stop when bitstream ends (simulate one bursterror", "len): \"\"\"create a random len bits long string \"\"\" bits = \"\" for", "to simply flip bits with the given probability ARGS: a list of bits,", "= [] for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len):", "i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create a random", "new_bits.append((b + 1) % 2) # turn 0 to 1 and 1 to", "return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip bits with", "> numpy.random.random(): # roll random numbers new_bits.append((b + 1) % 2) # turn", "and 1 to 0 else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list(", "_____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len bits long bitstring \"\"\"", "i in range(len): bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def", "1) % 2) ) # turn 0 to 1 and 1 to 0", "of bits \"\"\" new_bits = [] for b in bits: new_bit = \"\"", "bits, the probability for an error[0-1] RETURN: a list of bits \"\"\" new_bits", "# roll random numbers currently_bursting = True while currently_bursting and i < len(", "for b in bits: i = 0 new_bits.append(\"\") while i < len(b): if", "and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) + 1)", "bits2): \"\"\"compare two bitlists and higlight the differences\"\"\" differences = [] if len(bits1)", "erorrs \"\"\" new_bits = \"\" i = 0 while i < len(bits): if", "channel\"\"\" def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random", "+= 1 else: new_bits[len(new_bits) - 1] += str(b[i]) i += 1 return new_bits", "bits 
with added burst error \"\"\" new_bits = [] currently_bursting = False for", "1 else: new_bits += str(bits[i]) i += 1 return new_bits # ______________compare bits__________________________", "0 else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability,", "): \"\"\"A function to simply flip bits with the given probability ARGS: a", "of bits \"\"\" new_bits = \"\" for b in bits: if probability >", "new_bits = \"\" for b in bits: if probability > numpy.random.random(): # roll", "\"\"\" new_bits = \"\" i = 0 while i < len(bits): if burst_probability", "for i in range(len(b)): if probability > numpy.random.random(): # roll random numbers new_bit", "to hold usefull funktions to simulate noise in a channel\"\"\" def __init__(self): return", "probability): \"\"\"A function to simply flip bits with the given probability ARGS: a", "(c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200)", "random len bits long bitstring \"\"\" bits = [] for i in range(len):", "currently_bursting = True while currently_bursting and i < len( b ): # stop", "+= str(b[i]) currently_bursting = False i += 1 else: new_bits[len(new_bits) - 1] +=", "new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip bits with the", "\"\"\" new_bits = [] currently_bursting = False for b in bits: i =", "simply flip bits with the given probability ARGS: a list of bits, the", "(simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1]", "an error[0-1] Return: a string full of bits \"\"\" new_bits = \"\" for", "one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1)", 
"stop when bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst >", "class channel_noise_simulator: \"\"\"Class to hold usefull funktions to simulate noise in a channel\"\"\"", "______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and higlight the differences\"\"\"", "with the given probability ARGS: a list of bits, the probability for an", "in bits: i = 0 new_bits.append(\"\") while i < len(b): if burst_probability >", "ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) -", "create_random_bits_list(self, len): \"\"\"create a random len bits long bitstring \"\"\" bits = []", "return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply", "random numbers currently_bursting = True while currently_bursting and i < len( bits ):", "= \"\" for b in bits: if probability > numpy.random.random(): # roll random", "[] i = 0 while i < len(bits): if burst_probability > numpy.random.random(): #", "in range(len): bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create a random len", "bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append(", "Return: a string full of bits \"\"\" new_bits = \"\" for b in", "when bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random():", "and 1 to 0 else: new_bits += b return new_bits def randomise_bits_string_list(self, bits,", "+= b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip", "1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to", "error[0-1], the probability to leave the bursterror[0-1] Return: String of bits 
with added", "String of bits with added burst erorrs \"\"\" new_bits = \"\" i =", "= False i += 1 else: new_bits.append(bits[i]) i += 1 return new_bits def", "c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200))", "with added burst erorrs \"\"\" new_bits = [] i = 0 while i", "# turn 0 to 1 and 1 to 0 else: new_bit += str(b[i])", "str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function", "bits ): # stop when bitstream ends (simulate one bursterror and adjust i)", "def create_random_bits_string(self, len): \"\"\"create a random len bits long string \"\"\" bits =", "[] for b in bits: new_bit = \"\" for i in range(len(b)): if", "# roll random numbers new_bit += str((int(b[i]) + 1) % 2) # turn", "new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip", "i += 1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A", "if burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True while currently_bursting", "probability for an error[0-1], the probability to leave the bursterror[0-1] Return: String of", "a random len bits long string \"\"\" bits = \"\" for i in", "in a channel\"\"\" def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create", "\"\"\" bits = [] for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits def", "turn 0 to 1 and 1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting =", "in bits: if probability > numpy.random.random(): # roll random numbers new_bits += 
str((int(b)", "+= str( ((int(bits[i]) + 1) % 2) ) # turn 0 to 1", "1 to 0 randomly else: new_bits += str(bits[i]) currently_bursting = False i +=", "len(b): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True while", "len bits long string \"\"\" bits = \"\" for i in range(len): bits", "0) print(\"Differences found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5))", "bits \"\"\" new_bits = [] for b in bits: new_bit = \"\" for", "False for b in bits: i = 0 new_bits.append(\"\") while i < len(b):", "1] += str(b[i]) currently_bursting = False i += 1 else: new_bits[len(new_bits) - 1]", "(c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9) # print", "i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 ) #", "flip bits with the given probability ARGS: a list of bits, the probability", "error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits with the given probability ARGS:", "1 and 1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting = False i +=", "with added burst error \"\"\" new_bits = [] currently_bursting = False for b", "to 1 and 1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting = False i", "ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits +=", "\" + str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5))", "given probability ARGS: a list of bits, the probability for an error[0-1] Return:", "new_bits += str(bits[i]) currently_bursting = False i += 1 else: new_bits += str(bits[i])", "1 to 0 else: new_bit += 
str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self,", "ARGS: a String of bits, the probability for an error[0-1], the probability to", "given probability ARGS: a String of bits, the probability for an error[0-1], the", "may result in higher errorrate\") min_length = min(len(bits1), len(bits2)) for i in range(min_length):", "a channel\"\"\" def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a", "else: new_bits[len(new_bits) - 1] += str(b[i]) currently_bursting = False i += 1 else:", "a list of bits \"\"\" new_bits = [] for b in bits: if", "# stop when bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst", "roll random numbers new_bit += str((int(b[i]) + 1) % 2) # turn 0", "for i in range(len): bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________", "1 and 1 to 0 else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def", "new_bits.append(\"\") while i < len(b): if burst_probability > numpy.random.random(): # roll random numbers", "False i += 1 else: new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string(", "numbers new_bits.append((b + 1) % 2) # turn 0 to 1 and 1", "bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability):", "compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and higlight the differences\"\"\" differences = []", "len(bits): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True while", "bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits", "randomly else: new_bits.append(bits[i]) currently_bursting = False i += 1 else: new_bits.append(bits[i]) i +=", "one bursterror and adjust i) if error_rate_in_burst > 
numpy.random.random(): new_bits += str( ((int(bits[i])", "> numpy.random.random(): # roll random numbers new_bits += str((int(b) + 1) % 2)", "i = 0 while i < len(bits): if burst_probability > numpy.random.random(): # roll", "list of bits, the probability for an error[0-1], the probability to leave the", "bitstream ends (simulate one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits)", "0 to 1 and 1 to 0 else: new_bits += b return new_bits", "print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr", "list of bits \"\"\" new_bits = [] for b in bits: if probability", "1 to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function", "if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 ) # turn", "2)) return bits def create_random_bits_string(self, len): \"\"\"create a random len bits long string", "return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two bitlists and", "2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to", "return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip bits with", "new_bits += str((int(b) + 1) % 2) # turn 0 to 1 and", "str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function", "differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print 
(c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) #", "leave the bursterror[0-1] Return: list of bits with added burst erorrs \"\"\" new_bits", "self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with the", "= False i += 1 else: new_bits += str(bits[i]) i += 1 return", "print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb=", "range(len): bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits,", "of bits with added burst erorrs \"\"\" new_bits = \"\" i = 0", "error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with the given probability ARGS:", "numpy.random.random(): # roll random numbers new_bits.append((b + 1) % 2) # turn 0", "for b in bits: if probability > numpy.random.random(): # roll random numbers new_bits.append((b", "= False for b in bits: i = 0 new_bits.append(\"\") while i <", "adjust i) if error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) %", "numpy.random.random(): new_bits[len(new_bits) - 1] += str( ((int(b[i]) + 1) % 2) ) #", "probability for an error[0-1], the probability to leave the bursterror[0-1] Return: list of", "bits with the given probability ARGS: a list of bits, the probability for", "\"\"\" new_bits = [] i = 0 while i < len(bits): if burst_probability", "the given probability ARGS: a String of bits, the probability for an error[0-1],", "\"\"\"create a random len bits long string \"\"\" bits = \"\" for i", "ARGS: a list of bits, the probability for an error[0-1], the probability to", "new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, 
):", "currently_bursting = True while currently_bursting and i < len( bits ): # stop", "turn 0 to 1 and 1 to 0 else: new_bit += str(b[i]) new_bits.append(new_bit)", "+= 1 else: new_bits += str(bits[i]) i += 1 return new_bits # ______________compare", "funktions to simulate noise in a channel\"\"\" def __init__(self): return # _____________create bits___________________", "else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9,", "= [] i = 0 while i < len(bits): if burst_probability > numpy.random.random():", "else 0) print(\"Differences found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator() # print", "# turn 0 to 1 and 1 to 0 else: new_bits.append(b) return new_bits", "currently_bursting = False i += 1 else: new_bits[len(new_bits) - 1] += str(b[i]) i", "def randomise_bits_string_list(self, bits, probability): \"\"\"A function to simply flip bits with the given", "+= str(b[i]) i += 1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9", "in bits: new_bit = \"\" for i in range(len(b)): if probability > numpy.random.random():", "new_bit += str((int(b[i]) + 1) % 2) # turn 0 to 1 and", "print(\"Differences found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator() # print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) #", "0 randomly else: new_bits.append(bits[i]) currently_bursting = False i += 1 else: new_bits.append(bits[i]) i", "i += 1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ): \"\"\"A", "numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) % 2) ) # turn 0", "+= 1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare two", "bits1[i] != bits2[i] else 0) print(\"Differences found: \" + 
str(differences.count(True))) return differences #", "adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) % 2 )", "new_bits = [] for b in bits: if probability > numpy.random.random(): # roll", "0 else: new_bits += b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A function", "burst_probability, error_rate_in_burst=0.9 ): \"\"\"A function to simply flip bits with the given probability", "currently_bursting = False for b in bits: i = 0 new_bits.append(\"\") while i", "of bits, the probability for an error[0-1], the probability to leave the bursterror[0-1]", "return bits def create_random_bits_string(self, len): \"\"\"create a random len bits long string \"\"\"", "\"\" for i in range(len): bits += str(numpy.random.randint(0, 2)) return bits # _____________Randoise", "else: new_bits.append(bits[i]) currently_bursting = False i += 1 else: new_bits.append(bits[i]) i += 1", "new_bit += str(b[i]) new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ):", "bits, probability): \"\"\"A function to simply flip bits with the given probability ARGS:", "bits2[i] else 0) print(\"Differences found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator() #", "str(bits[i]) currently_bursting = False i += 1 else: new_bits += str(bits[i]) i +=", "i += 1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): \"\"\"compare", "# print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) #", "burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to simply flip bits with the given probability", "[] currently_bursting = False for b in bits: i = 0 
new_bits.append(\"\") while", "+= str(bits[i]) i += 1 return new_bits # ______________compare bits__________________________ def compare_and_highlight_differences(self, bits1,", "# print (c.randomise_bits_list([1,1,1,1,0,0,0,0,1],0.5)) # print (c.randomise_bits_string(\"1101110\",0.5)) # print (c.compare_and_highlight_differences([1,1,1,0,0,1,1,0,0,1,0,1,1,1],[0,1,1,0,0,1,1,1,1,1,0,1,0,1])) # print (c.create_random_bits_list(200)) #", "ARGS: a list of bits, the probability for an error[0-1] RETURN: a list", "bits: if probability > numpy.random.random(): # roll random numbers new_bits += str((int(b) +", "> numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) % 2) ) # turn", "if probability > numpy.random.random(): # roll random numbers new_bits.append((b + 1) % 2)", "a list of bits, the probability for an error[0-1] Return: a string full", "bitlists and higlight the differences\"\"\" differences = [] if len(bits1) != len(bits2): print(\"waning,", "bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits.append( (bits[i] + 1) %", "create_random_bits_string(self, len): \"\"\"create a random len bits long string \"\"\" bits = \"\"", "randomise_bits_string(self, bits, probability): \"\"\"A function to simply flip bits with the given probability", "else: new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9,", "i += 1 else: new_bits += str(bits[i]) i += 1 return new_bits #", "long string \"\"\" bits = \"\" for i in range(len): bits += str(numpy.random.randint(0,", "return # _____________create bits___________________ def create_random_bits_list(self, len): \"\"\"create a random len bits long", "1 else: new_bits.append(bits[i]) i += 1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability,", "0 to 1 and 1 to 0 randomly else: new_bits.append(bits[i]) currently_bursting = False", "= [] for b in bits: if probability > 
numpy.random.random(): # roll random", "+ 1) % 2) # turn 0 to 1 and 1 to 0", "roll random numbers currently_bursting = True while currently_bursting and i < len( bits", "< len(b): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting = True", "\"\"\"create a random len bits long bitstring \"\"\" bits = [] for i", "to 1 and 1 to 0 else: new_bits += b return new_bits def", "an error[0-1], the probability to leave the bursterror[0-1] Return: list of bits with", "!= bits2[i] else 0) print(\"Differences found: \" + str(differences.count(True))) return differences # c=channel_noise_simulator()", "new_bits.append(new_bit) return new_bits def randomise_bits_burst_string_list( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to", "else: new_bits += str(bits[i]) i += 1 return new_bits # ______________compare bits__________________________ def", "to 0 else: new_bits += b return new_bits def randomise_bits_string_list(self, bits, probability): \"\"\"A", "bits with added burst erorrs \"\"\" new_bits = \"\" i = 0 while", "while i < len(b): if burst_probability > numpy.random.random(): # roll random numbers currently_bursting", "and i < len( b ): # stop when bitstream ends (simulate one", "min_length = min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if bits1[i] != bits2[i]", "# roll random numbers new_bits.append((b + 1) % 2) # turn 0 to", "new_bits += str( ((int(bits[i]) + 1) % 2) ) # turn 0 to", "b in bits: new_bit = \"\" for i in range(len(b)): if probability >", "bits \"\"\" new_bits = [] for b in bits: if probability > numpy.random.random():", "min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0)", "range(min_length): differences.append(1 if bits1[i] != bits2[i] else 0) print(\"Differences found: \" + str(differences.count(True)))", "bits__________________________ def compare_and_highlight_differences(self, bits1, bits2): 
\"\"\"compare two bitlists and higlight the differences\"\"\" differences", "ARGS: a list of bits, the probability for an error[0-1] Return: a string", "one bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] +=", "usefull funktions to simulate noise in a channel\"\"\" def __init__(self): return # _____________create", "added burst error \"\"\" new_bits = [] currently_bursting = False for b in", "currently_bursting = False i += 1 else: new_bits.append(bits[i]) i += 1 return new_bits", "+= str((int(b) + 1) % 2) # turn 0 to 1 and 1", "0 to 1 and 1 to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self,", "of bits with added burst error \"\"\" new_bits = [] currently_bursting = False", "noise in a channel\"\"\" def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self, len):", "1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function to", "i < len( b ): # stop when bitstream ends (simulate one bursterror", "given probability ARGS: a list of bits, the probability for an error[0-1] RETURN:", "0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits, probability): \"\"\"A function to simply", "bits = \"\" for i in range(len): bits += str(numpy.random.randint(0, 2)) return bits", "1) % 2) # turn 0 to 1 and 1 to 0 else:", "0 to 1 and 1 to 0 randomly else: new_bits[len(new_bits) - 1] +=", "str(b[i]) i += 1 return new_bits def randomise_bits_burst_list( self, bits, burst_probability, error_rate_in_burst=0.9 ):", "else: new_bits += str(bits[i]) currently_bursting = False i += 1 else: new_bits +=", "= \"\" for i in range(len): bits += str(numpy.random.randint(0, 2)) return bits #", "(c.create_random_bits_list(200)) # rb= c.create_random_bits_string(200) # rr = c.randomise_bits_burst_string(rb,0.01,.9) # print (c.compare_and_highlight_differences(rb,rr)) # \"\"\"", 
"bits.append(numpy.random.randint(0, 2)) return bits def create_random_bits_string(self, len): \"\"\"create a random len bits long", "def create_random_bits_list(self, len): \"\"\"create a random len bits long bitstring \"\"\" bits =", "a String of bits, the probability for an error[0-1], the probability to leave", "= [] currently_bursting = False for b in bits: i = 0 new_bits.append(\"\")", "bitstring \"\"\" bits = [] for i in range(len): bits.append(numpy.random.randint(0, 2)) return bits", "leave the bursterror[0-1] Return: String of bits with added burst erorrs \"\"\" new_bits", "bursterror and adjust i) if error_rate_in_burst > numpy.random.random(): new_bits[len(new_bits) - 1] += str(", "return bits # _____________Randoise bits______________________ def randomise_bits_list(self, bits, probability): \"\"\"A function to simply", "to simulate noise in a channel\"\"\" def __init__(self): return # _____________create bits___________________ def", "full of bits \"\"\" new_bits = \"\" for b in bits: if probability", "import numpy class channel_noise_simulator: \"\"\"Class to hold usefull funktions to simulate noise in", "for b in bits: new_bit = \"\" for i in range(len(b)): if probability", "to 1 and 1 to 0 randomly else: new_bits[len(new_bits) - 1] += str(b[i])", "long bitstring \"\"\" bits = [] for i in range(len): bits.append(numpy.random.randint(0, 2)) return", "2) # turn 0 to 1 and 1 to 0 else: new_bits.append(b) return", "bits long string \"\"\" bits = \"\" for i in range(len): bits +=", "to 1 and 1 to 0 else: new_bit += str(b[i]) new_bits.append(new_bit) return new_bits", "numbers currently_bursting = True while currently_bursting and i < len( bits ): #", "burst error \"\"\" new_bits = [] currently_bursting = False for b in bits:", "i in range(len(b)): if probability > numpy.random.random(): # roll random numbers new_bit +=", "numpy class channel_noise_simulator: \"\"\"Class to hold usefull funktions to simulate noise in a", "[] if len(bits1) != len(bits2): 
print(\"waning, different lengths detected. may result in higher", "simulate noise in a channel\"\"\" def __init__(self): return # _____________create bits___________________ def create_random_bits_list(self,", "!= len(bits2): print(\"waning, different lengths detected. may result in higher errorrate\") min_length =", "probability > numpy.random.random(): # roll random numbers new_bits += str((int(b) + 1) %", "+= 1 return new_bits def randomise_bits_burst_string( self, bits, burst_probability, error_rate_in_burst=0.9, ): \"\"\"A function", "and higlight the differences\"\"\" differences = [] if len(bits1) != len(bits2): print(\"waning, different", "the probability for an error[0-1] Return: a string full of bits \"\"\" new_bits", "= min(len(bits1), len(bits2)) for i in range(min_length): differences.append(1 if bits1[i] != bits2[i] else", "error_rate_in_burst > numpy.random.random(): new_bits += str( ((int(bits[i]) + 1) % 2) ) #", "roll random numbers new_bits.append((b + 1) % 2) # turn 0 to 1", "error[0-1] RETURN: a list of bits \"\"\" new_bits = [] for b in", "0 new_bits.append(\"\") while i < len(b): if burst_probability > numpy.random.random(): # roll random", "len( bits ): # stop when bitstream ends (simulate one bursterror and adjust", "new_bits = [] i = 0 while i < len(bits): if burst_probability >", "# roll random numbers new_bits += str((int(b) + 1) % 2) # turn", "to 1 and 1 to 0 else: new_bits.append(b) return new_bits def randomise_bits_string(self, bits," ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase):", "world\" # 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "2020 Curtin University # # Licensed under the Apache License, Version 2.0 (the", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "# limitations under the License. # Author: <NAME> import unittest from observatory.platform.cli.click_utils import", "License. 
# You may obtain a copy of the License at # #", "output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output = indent(original_str,", "output) # 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check", "self.assertEqual(output, \"# \") input_str = \"Hello world\" output = comment(input_str) self.assertEqual(output, \"# Hello", "\"hello world\" # 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) #", "output) # Check that values below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str,", "self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\" output", "self.assertEqual(f\" {original_str}\", output) # 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output)", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" # 2 spaces output =", "and # limitations under the License. # Author: <NAME> import unittest from observatory.platform.cli.click_utils", "compliance with the License. 
# You may obtain a copy of the License", "INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str =", "3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output", "raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self):", "0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\" output = comment(input_str)", "permissions and # limitations under the License. # Author: <NAME> import unittest from", "# Author: <NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4,", "= indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output = indent(original_str, INDENT3)", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "this file except in compliance with the License. # You may obtain a", "output) # 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "= indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output = indent(original_str, INDENT2)", "you may not use this file except in compliance with the License. #", "output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output = indent(original_str,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "under the License. 
# Author: <NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1,", "with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\"", "unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class", "ANY KIND, either express or implied. # See the License for the specific", "def test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str =", "# 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that", "Copyright 2020 Curtin University # # Licensed under the Apache License, Version 2.0", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "use this file except in compliance with the License. # You may obtain", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values below 0 raise", "not use this file except in compliance with the License. # You may", "# 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "governing permissions and # limitations under the License. 
# Author: <NAME> import unittest", "indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" # 2 spaces", "self.assertEqual(f\" {original_str}\", output) # Check that values below 0 raise assertion error with", "See the License for the specific language governing permissions and # limitations under", "\"# \") input_str = \"Hello world\" output = comment(input_str) self.assertEqual(output, \"# Hello world\")", "indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values below 0 raise assertion", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "# 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self):", "{original_str}\", output) # 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) #", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\",", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output =", "with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output,", "indent(original_str, 
INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\"", "5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values", "INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\",", "OF ANY KIND, either express or implied. # See the License for the", "INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values below 0 raise assertion error", "2.0 (the \"License\"); # you may not use this file except in compliance", "INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\"", "# you may not use this file except in compliance with the License.", "for the specific language governing permissions and # limitations under the License. #", "agreed to in writing, software # distributed under the License is distributed on", "self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output, \"#", "{original_str}\", output) # 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) #", "values below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the", "import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, )", "spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output =", "(the \"License\"); # you may not use this file except in compliance with", "that values below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError):", "test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello", "below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1)", "# # Unless required by applicable law or agreed to in writing, software", "{original_str}\", output) # 3 spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) #", "express or implied. # See the License for the specific language governing permissions", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "except in compliance with the License. # You may obtain a copy of", "specific language governing permissions and # limitations under the License. # Author: <NAME>", "<NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent,", "by applicable law or agreed to in writing, software # distributed under the", "limitations under the License. 
# Author: <NAME> import unittest from observatory.platform.cli.click_utils import (", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "test_indent(self): original_str = \"hello world\" # 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\"", "output = comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\" output = comment(input_str)", "\"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\" output =", "def test_indent(self): original_str = \"hello world\" # 2 spaces output = indent(original_str, INDENT1)", "License. # Author: <NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3,", "either express or implied. # See the License for the specific language governing", "comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" # 2", "the License. # Author: <NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "University # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "original_str = \"hello world\" # 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\",", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" # 2 spaces output = indent(original_str,", "= \"hello world\" # 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output)", "error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str =", "output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values below 0", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "Author: <NAME> import unittest from observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment,", "2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces output", "file except in compliance with the License. 
# You may obtain a copy", "INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" #", "indent(original_str, -1) def test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output, \"# \")", "# Check that values below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0)", "indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\"", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "input_str = \"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\"", "License for the specific language governing permissions and # limitations under the License.", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "the License. # You may obtain a copy of the License at #", "to in writing, software # distributed under the License is distributed on an", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "self.assertEqual(f\" {original_str}\", output) # 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output)", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "implied. 
# See the License for the specific language governing permissions and #", "spaces output = indent(original_str, INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output =", "( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str", ") class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello world\" # 2 spaces output", "\"License\"); # you may not use this file except in compliance with the", "output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output = indent(original_str,", "Check that values below 0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "required by applicable law or agreed to in writing, software # distributed under", "# 2 spaces output = indent(original_str, INDENT1) self.assertEqual(f\" {original_str}\", output) # 3 spaces", "output) # 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5", "assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str", "language governing permissions and # limitations under the License. # Author: <NAME> import", "applicable law or agreed to in writing, software # distributed under the License", "{original_str}\", output) # Check that values below 0 raise assertion error with self.assertRaises(AssertionError):", "self.assertEqual(f\" {original_str}\", output) # 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output)", "the specific language governing permissions and # limitations under the License. 
# Author:", "-1) def test_comment(self): input_str = \"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str", "= comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\" output = comment(input_str) self.assertEqual(output,", "indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def test_comment(self): input_str = \"\" output =", "comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\" output = comment(input_str) self.assertEqual(output, \"#", "INDENT2) self.assertEqual(f\" {original_str}\", output) # 4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\",", "or agreed to in writing, software # distributed under the License is distributed", "= indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output = indent(original_str, INDENT4)", "INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def test_indent(self): original_str = \"hello", "or implied. # See the License for the specific language governing permissions and", "indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output = indent(original_str, INDENT4) self.assertEqual(f\"", "= \"\" output = comment(input_str) self.assertEqual(output, \"# \") input_str = \"Hello world\" output", "Curtin University # # Licensed under the Apache License, Version 2.0 (the \"License\");", "0 raise assertion error with self.assertRaises(AssertionError): indent(original_str, 0) with self.assertRaises(AssertionError): indent(original_str, -1) def", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "observatory.platform.cli.click_utils import ( INDENT1, INDENT2, INDENT3, INDENT4, comment, indent, ) class TestClick(unittest.TestCase): def", "4 spaces output = indent(original_str, INDENT3) self.assertEqual(f\" {original_str}\", output) # 5 spaces output", "with the License. # You may obtain a copy of the License at", "spaces output = indent(original_str, INDENT4) self.assertEqual(f\" {original_str}\", output) # Check that values below", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "# Copyright 2020 Curtin University # # Licensed under the Apache License, Version", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
[ "in metadata.items(): if key not in object_metadata: return False elif value is not", "not in object_metadata: return False elif value is not None and value !=", "def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body,", "metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type,", "or _matches_field(handler, changed_fields or [])) and (not handler.labels or _matches_labels(handler, body)) and (not", "[])) and (not handler.labels or _matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body))", "return any(field[:len(handler.field)] == handler.field for field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler,", "return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata", "_matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for", "key, value in metadata.items(): if key not in object_metadata: return False elif value", "changed_fields): return any(field[:len(handler.field)] == handler.field for field in changed_fields) def _matches_labels(handler, body): return", "body)) and (not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)]", "body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items(): if key not in object_metadata:", "def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def 
_matches_metadata(handler, body, metadata_type): metadata =", "in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return", "_matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations')", "changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler,", "def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {})", "{}).get(metadata_type, {}) for key, value in metadata.items(): if key not in object_metadata: return", "and (not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] ==", "object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items(): if key not", "body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key,", "handler.field or _matches_field(handler, changed_fields or [])) and (not handler.labels or _matches_labels(handler, body)) and", "def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field in changed_fields) def _matches_labels(handler,", "getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items(): if", "_matches_field(handler, changed_fields or [])) and (not handler.labels or _matches_labels(handler, body)) and (not handler.annotations", "(not handler.labels or 
_matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body)) ) def", "key not in object_metadata: return False elif value is not None and value", "for key, value in metadata.items(): if key not in object_metadata: return False elif", "metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata", "def match(handler, body, changed_fields=None): return ( (not handler.field or _matches_field(handler, changed_fields or []))", "changed_fields or [])) and (not handler.labels or _matches_labels(handler, body)) and (not handler.annotations or", "(not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field", "= body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items(): if key not in", "field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body):", "metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key, value", "( (not handler.field or _matches_field(handler, changed_fields or [])) and (not handler.labels or _matches_labels(handler,", "_matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body,", "handler.field for field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def", "for field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def 
_matches_annotations(handler,", "match(handler, body, changed_fields=None): return ( (not handler.field or _matches_field(handler, changed_fields or [])) and", "metadata.items(): if key not in object_metadata: return False elif value is not None", "_matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field in changed_fields) def _matches_labels(handler, body):", "handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for", "body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata = body.get('metadata',", "body, changed_fields=None): return ( (not handler.field or _matches_field(handler, changed_fields or [])) and (not", "False elif value is not None and value != object_metadata[key]: return False else:", "body): return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def", "or [])) and (not handler.labels or _matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler,", "_matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type) object_metadata =", ") def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field in changed_fields) def", "metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items(): if key", "or _matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields):", "metadata = getattr(handler, metadata_type) object_metadata = 
body.get('metadata', {}).get(metadata_type, {}) for key, value in", "value in metadata.items(): if key not in object_metadata: return False elif value is", "elif value is not None and value != object_metadata[key]: return False else: continue", "body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field in changed_fields)", "return _matches_metadata(handler=handler, body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler,", "_matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type): metadata = getattr(handler,", "if key not in object_metadata: return False elif value is not None and", "in object_metadata: return False elif value is not None and value != object_metadata[key]:", "_matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field in", "and (not handler.labels or _matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body)) )", "body=body, metadata_type='labels') def _matches_annotations(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def _matches_metadata(handler, body, metadata_type):", "is not None and value != object_metadata[key]: return False else: continue return True", "any(field[:len(handler.field)] == handler.field for field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body,", "(not handler.field or _matches_field(handler, changed_fields or [])) and (not handler.labels or _matches_labels(handler, body))", "body): return _matches_metadata(handler=handler, body=body, metadata_type='annotations') def 
_matches_metadata(handler, body, metadata_type): metadata = getattr(handler, metadata_type)", "{}) for key, value in metadata.items(): if key not in object_metadata: return False", "_matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return", "object_metadata: return False elif value is not None and value != object_metadata[key]: return", "return ( (not handler.field or _matches_field(handler, changed_fields or [])) and (not handler.labels or", "value is not None and value != object_metadata[key]: return False else: continue return", "return False elif value is not None and value != object_metadata[key]: return False", "= getattr(handler, metadata_type) object_metadata = body.get('metadata', {}).get(metadata_type, {}) for key, value in metadata.items():", "== handler.field for field in changed_fields) def _matches_labels(handler, body): return _matches_metadata(handler=handler, body=body, metadata_type='labels')", "or _matches_annotations(handler, body)) ) def _matches_field(handler, changed_fields): return any(field[:len(handler.field)] == handler.field for field", "handler.labels or _matches_labels(handler, body)) and (not handler.annotations or _matches_annotations(handler, body)) ) def _matches_field(handler,", "changed_fields=None): return ( (not handler.field or _matches_field(handler, changed_fields or [])) and (not handler.labels" ]
[ "ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE", "f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0],", "b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size,", "self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size * 2)", "self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size", "\"\"\" import unittest import matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def", ") self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self): b =", "for i in range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass", "b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size)", ") def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\")", "matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks", "16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\"", "self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE", "<< 1, size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20", "\"\"\" Test block crypto. 
\"\"\" import unittest import matasano.blocks import matasano.util __author__ =", "size * 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual(", "un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded", "class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\")", "matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1),", "SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b,", "BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len", "size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\"", "self.assertEqual( f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key", "= \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded,", "def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual(", "test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0 ) self.assertEqual(", "matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\" * size)) def", "= \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"),", "matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) 
unpadded = matasano.blocks.un_pkcs_1_5(padded, size)", ") self.assertEqual( f(size, 10), slice(0, size * 11) ) def test_ith_byte_in_block(self): f =", "b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size,", "b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self):", "i in range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l", "except IndexError: pass l = bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\")", "matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True),", "matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size)", "3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i", "matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key,", "#!/usr/bin/env/ python # encoding: utf-8 \"\"\" Test block crypto. 
\"\"\" import unittest import", "f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i in blocks), len(b)", "= 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b )", "11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0),", "slice(0, size * 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16", "1), slice(0, size * 2) ) self.assertEqual( f(size, 10), slice(0, size * 11)", "size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size )", "padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded", "def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual(", "size = 16 self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size, 1), 0 )", ") self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size * 2), 2 )", "def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size)", "__author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this", "size) ) self.assertEqual( f(size, 1), slice(0, size * 2) ) self.assertEqual( f(size, 10),", "\"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\"", "key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key,", "matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4) size = 
16", "unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self): b", "= b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f", "utf-8 \"\"\" Test block crypto. \"\"\" import unittest import matasano.blocks import matasano.util __author__", "= 16 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded", "slice(0, size * 2) ) self.assertEqual( f(size, 10), slice(0, size * 11) )", "= matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0],", "size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual(", "self.assertEqual( f(size, 1), slice(size, size * 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block", "self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size * 2)", "4) size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded,", "for i in blocks), len(b) ) l = list() for i in range(len(blocks[0])):", "1 ) self.assertEqual( f(size, size * 2), 2 ) if __name__ == '__main__':", "2) ) self.assertEqual( f(size, 10), slice(0, size * 11) ) def test_ith_byte_in_block(self): f", "ICE BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size", "def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0 )", "iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key,", ") def 
test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b,", "matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\")", "test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual(", "= 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" *", "f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual(", "BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size )", "size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b,", "b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b )", "= matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size,", "0), 0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size), 1 )", "b = \"this is a test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len)", "+ b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException,", "BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises(", "matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size", "size) self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE", "list() for i in range(len(blocks[0])): for j in 
range(len(blocks)): try: l.append(blocks[j][i]) except IndexError:", "k_len ) self.assertEqual( sum(len(i) for i in blocks), len(b) ) l = list()", "padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b", ") self.assertEqual( f(size, 1), slice(size, size * 2) ) def test_bytes_to_block(self): f =", "f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key =", "= 20 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size", "f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr", "f = matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len = 3 blocks", "b + (b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size =", "* 4) size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2)", ") def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\")", "l = bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self):", "matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded,", "test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len = 3", "f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size,", "padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE", "matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b, size) un_padded =", "* 2) self.assertEqual(padded, b + 
(b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW", "unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded <<", ") self.assertEqual( f(size, 1), slice(0, size * 2) ) self.assertEqual( f(size, 10), slice(0,", "b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b =", "= 16 self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual(", "= \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block", "blocks = f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i in", "size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b", "size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size,", "SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f", "0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size * 2), 2", "= matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE", "self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\" * size)) def test_pkcs_1_5(self): b", "f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key()", "self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self): b = \"YELLOW", "test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual(", "un_padded) size = 16 padded = 
matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b,", "= \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def", "f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b", "self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def", ") self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f", "k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i in blocks), len(b) )", "= matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len = 3 blocks =", "size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0,", "b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key =", "= matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises(", "size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4) size = 16 padded", "f(size, 1), slice(0, size * 2) ) self.assertEqual( f(size, 10), slice(0, size *", "self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException,", "l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded =", "f(size, 1), slice(size, size * 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size", ") padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def", "\"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = 
matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size)", "range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual( b, l )", "2) self.assertEqual(padded, b + (b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\")", "decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\")", "16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size *", "10), slice(0, size * 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size =", ") self.assertEqual( f(size, size * 2), 2 ) if __name__ == '__main__': unittest.main()", "* 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size,", "test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key,", "self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f =", "padded << 1, size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size =", "self.assertEqual( f(size, 1), slice(0, size * 2) ) self.assertEqual( f(size, 10), slice(0, size", "\"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b", "sum(len(i) for i in blocks), len(b) ) l = list() for i in", "self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4) size = 16 padded =", "import matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f =", "len(blocks), k_len ) self.assertEqual( sum(len(i) for i in blocks), len(b) ) l =", "size) self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b, size) un_padded = 
matasano.blocks.un_pkcs_7(padded,", "in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual( b, l", "b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f =", "b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) un_padded =", "size) self.assertEqual(padded, b + b\"\\x04\" * 4) size = 16 padded = matasano.blocks.pkcs_7(b,", "(b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded", "\"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self):", "b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\")", "SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b +", "padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded =", "b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size ) def test_un_pkcs(self):", "= 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size", "IndexError: pass l = bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") )", "+ (b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20", "b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f =", "self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size * 2), 2 ) if", "self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE ICE 
BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded,", "b), decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\")", "\"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size", "= list() for i in range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i]) except", "size * 2) ) self.assertEqual( f(size, 10), slice(0, size * 11) ) def", "size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b =", "self.assertEqual(padded, b + b\"\\x04\" * 4) size = 16 padded = matasano.blocks.pkcs_7(b, size)", "self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b,", "* size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded =", "slice(size, size * 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16", "= \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self): f =", "= f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i in blocks),", "= matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded,", "= 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size", "self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key =", "matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, 
matasano.blocks.un_pkcs_7,", "decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b", "= matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b, size) un_padded", "def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len =", "f(size, size), 1 ) self.assertEqual( f(size, size * 2), 2 ) if __name__", "* 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size,", "padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded)", "size = 16 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded)", "= \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual(", "matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b", "for j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual(", "matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b =", "= matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b,", "self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20", "b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5,", "block crypto. 
\"\"\" import unittest import matasano.blocks import matasano.util __author__ = 'aldur' class", "l = list() for i in range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i])", "size * 2) self.assertEqual(padded, b + (b\"\\x10\" * size)) def test_pkcs_1_5(self): b =", ") self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size,", "a test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len )", ") self.assertEqual( sum(len(i) for i in blocks), len(b) ) l = list() for", "self.assertEqual(padded, b + (b\"\\x10\" * size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size", "matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1),", "self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW", "try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual( b, l ) self.assertEqual(", "size)) def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b,", "= \"this is a test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len) self.assertEqual(", "l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"),", "un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\"", "f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key,", "ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\"", "BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", 
un_padded) padded = b\"ICE ICE", "f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key", "b\"\\x04\" * 4) size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size *", "un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded =", "\"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key,", "\"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0],", "padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4) size", "= matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\" * size))", "pass l = bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def", "self.assertEqual( f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size =", "= bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b", "size), 1 ) self.assertEqual( f(size, size * 2), 2 ) if __name__ ==", "matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded =", "len(b) ) l = list() for i in range(len(blocks[0])): for j in range(len(blocks)):", "= matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual(", "\"this is a test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len) self.assertEqual( len(blocks),", "matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0 ) 
self.assertEqual( f(size, 1), 0", "test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) un_padded", "= matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size,", "padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded =", "= b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE", "matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW", "16 self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size,", "b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded =", "matasano.blocks.split_blocks b = \"this is a test\".encode(\"ascii\") k_len = 3 blocks = f(b,", "'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is a", "= 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b = \"this is", "size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b", "SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self):", "matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\")", "f(key, b), decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW", "bytes(l) self.assertEqual( b, l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b =", "f = 
matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key,", "= matasano.blocks.aes_ecb key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b),", "size = 20 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded)", ") def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0,", "+ b\"\\x04\" * 4) size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size", "self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for i in blocks), len(b) ) l", "20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4)", "import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f = matasano.blocks.split_blocks b", "f(size, 0), 0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size), 1", "l ) self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size", "# encoding: utf-8 \"\"\" Test block crypto. 
\"\"\" import unittest import matasano.blocks import", "padded, size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size", "self.assertEqual( f(size, 10), slice(0, size * 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block", ") unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded", "= 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b +", "b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded", "size * 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual(", "j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l) self.assertEqual( b,", "0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size * 2) ) self.assertEqual(", "slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size * 2) ) self.assertEqual( f(size,", "= matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1,", "size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded", "blocks), len(b) ) l = list() for i in range(len(blocks[0])): for j in", "\"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc", "= \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b )", "b ) def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b =", 
"SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" +", "self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size),", "iv=iv)[0], decrypt=True, iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW", "b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises(", "crypto. \"\"\" import unittest import matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase):", "1, size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded", "self.assertEqual( sum(len(i) for i in blocks), len(b) ) l = list() for i", "in range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l =", "f(size, 10), slice(0, size * 11) ) def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size", "encoding: utf-8 \"\"\" Test block crypto. 
\"\"\" import unittest import matasano.blocks import matasano.util", "def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0, size)", "b + b\"\\x04\" * 4) size = 16 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded),", "= \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded,", "b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0),", "b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b )", "20 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size =", "in blocks), len(b) ) l = list() for i in range(len(blocks[0])): for j", "test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded),", "= matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded", "iv=iv)[0], b ) def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b", "size) ) self.assertEqual( f(size, 1), slice(size, size * 2) ) def test_bytes_to_block(self): f", "def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b, size)", "self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size)", "decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True, iv=iv)[0], b ) def", ") def test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\")", "f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(0, size * 
2) )", "key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b)[0])[0], b )", "= matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0 ) self.assertEqual( f(size, 1),", "matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded ) self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_1_5, padded << 1, size", ") def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0,", "= matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size) self.assertEqual(padded, b + b\"\\x04\" * 4) size =", "BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb key", "ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self): f = matasano.blocks.aes_ecb", "matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException,", "size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\" * size)) def test_pkcs_1_5(self):", "b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b ) def test_aes_cbc(self): f", ") self.assertEqual( b.decode(\"ascii\"), l.decode(\"ascii\") ) def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size =", "padded = matasano.blocks.pkcs_7(b, size) self.assertEqual(len(padded), size * 2) self.assertEqual(padded, b + (b\"\\x10\" *", "def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0, size)", "un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) 
self.assertEqual(b\"ICE ICE BABY\",", "import unittest import matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self):", "\"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded = matasano.blocks.un_pkcs_1_5(padded, size) self.assertEqual( b, unpadded )", "unittest import matasano.blocks import matasano.util __author__ = 'aldur' class BlocksTestCase(unittest.TestCase): def test_split_blocks(self): f", "matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0],", "size ) padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size )", "self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b ) self.assertEqual( f(key, f(key, b, iv=iv)[0], decrypt=True,", "2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0),", "def test_pkcs_1_5(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_1_5(b, size)", "SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0], decrypt=True)[0], b", "= 3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i) for", "f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size * 2) )", ") def test_ith_byte_in_block(self): f = matasano.blocks.ith_byte_block size = 16 self.assertEqual( f(size, 0), 0", "key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b", ") l = list() for i in range(len(blocks[0])): for j in range(len(blocks)): try:", "= matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded,", 
"16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size *", "0), slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size * 2) ) def", "1), slice(size, size * 2) ) def test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size =", "* 2) ) self.assertEqual( f(size, 10), slice(0, size * 11) ) def test_ith_byte_in_block(self):", "padded = b\"ICE ICE BABY\\x01\\x02\\x03\\x04\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) def test_aes_ecb(self):", "= b\"ICE ICE BABY\\x04\\x04\\x04\\x04\" un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded", "f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual(", "i in blocks), len(b) ) l = list() for i in range(len(blocks[0])): for", "1), 0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size * 2),", "test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv =", "20 padded = matasano.blocks.pkcs_1_5(b, size) self.assertEqual( padded.to_bytes(size, \"big\"), b\"\\x00\\x02\\xff\\x00\" + b ) unpadded", "f(size, 1), 0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual( f(size, size *", ") def test_pkcs_7(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded = matasano.blocks.pkcs_7(b,", "test_bytes_to_block(self): f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0, size) )", "size ) def test_un_pkcs(self): b = \"YELLOW SUBMARINE\".encode(\"ascii\") size = 20 padded =", "size) self.assertEqual(b\"ICE ICE BABY\", un_padded) padded = b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7,", "Test block crypto. 
\"\"\" import unittest import matasano.blocks import matasano.util __author__ = 'aldur'", "\"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key, f(key, b), decrypt=True), b ) def", "b\"ICE ICE BABY\\x05\\x05\\x05\\x05\" self.assertRaises( matasano.blocks.BadPaddingException, matasano.blocks.un_pkcs_7, padded, size ) padded = b\"ICE ICE", "k_len = 3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len ) self.assertEqual( sum(len(i)", "= \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv = matasano.util.random_aes_key() self.assertEqual( f(key, f(key, b)[0],", "test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16 self.assertEqual( f(size, 0), slice(0, size) )", "0 ) self.assertEqual( f(size, 1), 0 ) self.assertEqual( f(size, size), 1 ) self.assertEqual(", "range(len(blocks[0])): for j in range(len(blocks)): try: l.append(blocks[j][i]) except IndexError: pass l = bytes(l)", "f = matasano.blocks.bytes_to_block size = 16 self.assertEqual( f(size, 0), slice(0, size) ) self.assertEqual(", "is a test\".encode(\"ascii\") k_len = 3 blocks = f(b, k_len) self.assertEqual( len(blocks), k_len", "un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16 padded = matasano.blocks.pkcs_7(b, size)", "16 padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded =", "f(key, f(key, b)[0])[0], b ) def test_bytes_in_blocks(self): f = matasano.blocks.bytes_in_block size = 16", "slice(0, size) ) self.assertEqual( f(size, 1), slice(size, size * 2) ) def test_bytes_to_block(self):", "padded = matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) size = 16", "matasano.blocks.pkcs_7(b, size) un_padded = matasano.blocks.un_pkcs_7(padded, size) self.assertEqual(b, un_padded) padded = b\"ICE ICE 
BABY\\x04\\x04\\x04\\x04\"", "test_aes_ctr(self): f = matasano.blocks.aes_ctr key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") self.assertEqual( f(key,", "def test_aes_cbc(self): f = matasano.blocks.aes_cbc key = \"YELLOW SUBMARINE\".encode(\"ascii\") b = \"00foobarfoobar00\".encode(\"ascii\") iv", "python # encoding: utf-8 \"\"\" Test block crypto. \"\"\" import unittest import matasano.blocks" ]
[ "print \"handling of message failed in handle_messages in MessageMap:\" print msg def finalize(self):", "= 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST =", "= None def check_map_complete(self, prop): for service in self._service_infos: if not self._service_infos[service][prop]: return", "\" payload:\" if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition,", "\"repeated\" } if msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj =", "print msg # ======================= # create the message maps # ======================= def get_msg(self,", "return payload try: from json import loads as parse_json except: globals()['parse_json'] = _parse_json", "key in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,'", "\"null\" elif isinstance(item, unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH: value =", "1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER = 2", "c_list, name=''): INDENT = ' ' c_list = [] + c_list if name:", "True: if not tag in self._tags: return tag tag += 1 def set_callback(self,", "NAME = 1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER", "= {'name': name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND =", "event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) # ========================= # pretty print message", "= True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed in handle_messages", "'{' for service in self._map: if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service],", "MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service }) else: print \"handling", "def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True if check in 
\"*\": return", "from scope # =========================== def request_host_info(self): for service in self._services: if not service.startswith('core-')", "% value or value # =========================== # pretty print STP/1 messages # ===========================", "tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG:", "False return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of message", "msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False", "service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD:", "= command_def and command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type =", "import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload = None try: payload =", "msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID]", "= self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums',", "\" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload", "definition, verbose_debug=verbose_debug) except Exception, msg: # print msg print \"failed to pretty print", "=========================== # get the messages from scope # =========================== def request_host_info(self): for service", "MessageMap:\" print msg def handle_messages(self, msg, service): 
if not msg[MSG_KEY_STATUS] and service in", "\"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition) else: print", "def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler", "handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return", "0 MSG_ID = 0 map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs", "try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except: print \"parsing", "name = field[FIELD_NAME] field_obj = { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], }", "not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if service[0]", "= self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name']", "MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS", "sub_item in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else:", "for msg in list: if msg[MSG_ID] == id: return msg return None def", "self._service_infos = {} self._map = map self._connection = connection self._callback = callback self._print_map", "not tag in self._tags: return tag tag += 1 def set_callback(self, callback, args={}):", "field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] =", "0 self.scope_minor_version = 0 self._service_infos = {} self._map = map self._connection = connection", "print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload =", "msg[MSG_KEY_PAYLOAD], \"\\n\" 
else: print msg def check_message(service, command, message_type): if MessageMap.filter and service", "% (indent * INDENT) else: print '%s%s: [],' % (indent * INDENT, self.quote(key))", "tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def handle_enums(self, msg, service): if", "map self._connection = connection self._callback = callback self._print_map = context.print_message_map self._print_map_services = filter(bool,", "self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, msg,", "name = '%s: ' % self.quote(name) print '%s%s{' % (indent * INDENT, name)", "service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self, value): return", "to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO", "if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list:", "COMMAND_LIST = 0 EVENT_LIST = 1 NAME = 0 NUMBER = 1 MESSAGE_ID", "self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj = map[command[NUMBER]] =", "\"%s%s:\" % (indent * INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent +", "self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list =", "True: m = matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if", "if check(command): return True return False # =========================== # pretty print STP/0 messages", "args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager = TagManager() class", "RESPONSE_ID = 3 # Command MessageInfo MSG_LIST = 0 MSG_ID = 0 map", "self._print_map_services or service in 
self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self,", "verbose_debug=False): if item and \"message\" in definition: print \"%s%s:\" % (indent * INDENT,", "and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]:", "print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print", "tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print", "print prelude if format: print \" message type:\", message_type print \" service:\", service", "return False # =========================== # pretty print STP/0 messages # =========================== def pretty_print_XML(prelude,", "name = enums[1] dict = {} if enums and len(enums) == 3: for", "2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP = {", "and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if", "or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) -", "self._tags[tag] = (callback, args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags:", "in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self, value): return isinstance(value,", "========================= # pretty print message maps # ========================= def pretty_print_object(self, obj, indent, c_list,", "MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH = 50", "payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in parse_json\" return payload", "= 4 ENUM_ID = 5 Q_MAP = { 0: \"required\", 1: \"optional\", 2:", "msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG]", "print '{' 
for service in self._map: if not self._print_map_services or service in self._print_map_services:", "context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages from scope # ===========================", "= enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else:", "None self._callback = None def check_map_complete(self, prop): for service in self._service_infos: if not", "service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\",", "in handle_info in MessageMap:\" print msg def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS]", "verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value =", "the specified filter failed\" print \"parsed filter:\", filter_obj if filter_obj: for service in", "try: from json import loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE =", "message structure?\" print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\" % (INDENT,", "\" MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags =", "the payloads by adding the keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO", "self.quote(key)) indent -= 1 print '%s},' % (indent * INDENT) def pretty_print_message_map(self): print", "= command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print msg", "os.path.isfile(filter): try: file = open(filter, 'rb') content = file.read() file.close() except: print \"reading", "{} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = 
self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command", "print message maps # ========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT =", "False # =========================== # pretty print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string,", "failed in handle_messages in MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler()", "print \" message type:\", message_type print \" service:\", service print \" command:\", command_name", "'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT:", "definition, item, verbose_debug=False): if item and \"message\" in definition: print \"%s%s:\" % (indent", "\" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags", "and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload", "print \"parsing the specified filter failed\" print \"parsed filter:\", filter_obj if filter_obj: for", "import re from common import Singleton from maps import status_map, format_type_map, message_type_map, message_map", "(INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\",", "\"\"\" to create a description map of all messages to be used to", "handle_messages in MessageMap:\" print msg def request_infos(self): for service in self._service_infos: tag =", "self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums':", "quote(self, value): return isinstance(value, str) and '\"%s\"' % value or value # ===========================", "print \"parsed 
filter:\", filter_obj if filter_obj: for service in filter_obj: for type in", "= parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list can be empty (e.g.", "OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1 except StopIteration: pass except:", "\"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match", "key in keys: if isinstance(obj[key], list): if key == \"message\": if obj['message_name'] in", "message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self, services,", "tag tag += 1 def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] =", "= parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None)", "= ' ' c_list = [] + c_list if name: name = '%s:", "command_def and command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]]", "finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None self._map", "= 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next() matches", "keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,' % (indent", "self._services = services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos = {} self._map", "event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums,", "command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) 
self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE:", "self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager =", "id: return msg return None def get_enum(self, list, id): enums = self.get_msg(list, id)", "print '%s%s{' % (indent * INDENT, name) indent += 1 keys = obj.keys()", "ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT,", "or isinstance(obj[key], list)): print '%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for", "map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg,", "print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service, command, message_type): if", "while True: m = matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1", "INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition,", "filter:\", filter_obj if filter_obj: for service in filter_obj: for type in filter_obj[service]: filter_obj[service][type]", "<circular reference>,' % ( indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print", "% ( indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: ['", "item == None: value = \"null\" elif isinstance(item, unicode): if not verbose_debug and", "# pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False):", "def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args)", "in c_list: print '%s\"message\": <circular reference>,' % ( indent * INDENT) continue else:", "versions = map(int, 
service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >=", "zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name'])", "in parse_json\" return payload try: from json import loads as parse_json except: globals()['parse_json']", "in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter =", "% self.quote(name) print '%s%s{' % (indent * INDENT, name) indent += 1 keys", "value) except: print \"%s%s: %s%s\" % ( indent * INDENT, name, value[0:100], '...')", "for item in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],' % (indent", "MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [],", "except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2", "def _get_empty_tag(self): tag = 1 while True: if not tag in self._tags: return", "self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] =", "definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def", "in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count = 0 matches_iter =", "to pretty print the paylod. 
wrong message structure?\" print \"%spayload: %s\" % (INDENT,", "(indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if \"enum\"", "pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service", "= \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter = 1", "4 print prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret", "def __init__(self, services, connection, callback, context, map=message_map): self._services = services self.scope_major_version = 0", "self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for", "= 2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP =", "%s%s\" % ( indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2,", "check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith return check_default content = filter", "request_infos(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND,", "message_map def _parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\", \",None\")) except: print", "COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT", "STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0", "self.request_enums() else: self.request_infos() else: print \"getting host info failed\" def request_enums(self): for service", "filter_obj = None import os if os.path.isfile(filter): try: file = open(filter, 'rb') content", "or cmd_id def __init__(self, services, connection, 
callback, context, map=message_map): self._services = services self.scope_major_version", "services, connection, callback, context, map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version =", "=========================== # pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition, item,", "messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0 messages\"\"\"", "5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1", "format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in", "verbose_debug=verbose_debug) except Exception, msg: # print msg print \"failed to pretty print the", "field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name]", "parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and", "INDENT = \" \" filter = None @staticmethod def set_filter(filter): def create_check(check): def", "in default_msg_handler in MessageMap:\" print msg # ======================= # create the message maps", "command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums,", "else: value = item if \"enum\" in definition: value = \"%s (%s)\" %", "m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group())", "MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service }) else:", "message list can be empty (e.g. 
for the 'core' service) if message_list: self.parse_raw_lists(service)", "pretty print message maps # ========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT", "MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if", "= \" \" filter = None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str):", "file = open(filter, 'rb') content = file.read() file.close() except: print \"reading filter failed\"", "= None self._map = None self._connection = None self._callback = None def check_map_complete(self,", "= 0 for msg in list: if msg[MSG_ID] == id: return msg return", "def _parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed", "= None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return check == in_str", "name, value) except: print \"%s%s: %s%s\" % ( indent * INDENT, name, value[0:100],", "'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) - 1) >= FIELD_Q and field[FIELD_Q]:", "self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not", "=========================== # pretty print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To", "2: \"repeated\" } if msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj", "1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >=", "= 1 MESSAGE_ID = 2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST =", "def pretty_print_object(self, obj, indent, c_list, name=''): INDENT = ' ' c_list = []", "all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 
COMMAND_ENUM_INFO =", "in_string, format): \"\"\"To pretty print STP 0 messages\"\"\" LF = \"\\n\" TEXT =", "= 0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG =", "\"failed evaling message in parse_json\" return payload try: from json import loads as", "False tag_manager = TagManager() class MessageMap(object): \"\"\" to create a description map of", "not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,' % (indent * INDENT,", "print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else:", "+ c_list if name: name = '%s: ' % self.quote(name) print '%s%s{' %", "status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg:", "MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [],", "None self._service_infos = None self._map = None self._connection = None self._callback = None", "= {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for", "% (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if", "= 0 MSG_ID = 0 map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST]", "\\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command): return True return", "dict = {} if enums and len(enums) == 3: for enum in enums[2]:", "if isinstance(obj[key], list): if key == \"message\": if obj['message_name'] in c_list: print '%s\"message\":", "map:' print '{' for service in self._map: if not self._print_map_services or service in", "for service in self._service_infos: tag = 
tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE:", "payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service, command, message_type): if MessageMap.filter and", "print STP 0 messages\"\"\" LF = \"\\n\" TEXT = 0 TAG = 1", "7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4", "return check_pass if check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'): check =", "MessageMap.filter and service in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check in", "in list: if msg[MSG_ID] == id: return msg return None def get_enum(self, list,", "in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name': name, 'q': 'required', 'type':", "= 3 OPENING_TAG = 4 print prelude if format: if in_string.startswith(\"<\"): in_string =", "file.close() except: print \"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval')", "= None if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id", "if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print", "= msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None)", "INDENT, self.quote(key)) indent -= 1 print '%s},' % (indent * INDENT) def pretty_print_message_map(self):", "host_info: for service in host_info[5]: if service[0] == \"scope\": versions = map(int, service[1].split('.'))", "import os if os.path.isfile(filter): try: file = open(filter, 'rb') content = file.read() file.close()", "name or cmd_id def __init__(self, services, connection, callback, context, map=message_map): self._services = services", "= self._service_infos[service]['raw_messages'][MSG_LIST] enums = 
self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj = map[command[NUMBER]]", "tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg,", "in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\",", "0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID = 3 # Command MessageInfo", "=========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and \"message\" in definition:", "print '%s%s: [],' % (indent * INDENT, self.quote(key)) indent -= 1 print '%s},'", "'\"%s\"' % value or value # =========================== # pretty print STP/1 messages #", "command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]]", "not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None", "field_obj = { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) -", "keys = obj.keys() for key in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key],", "format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def", "msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos =", "self.get_msg(list, id) name = enums[1] dict = {} if enums and len(enums) ==", "in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs,", "{} self._map = map self._connection = connection self._callback = callback self._print_map = context.print_message_map", "pretty_print_object(self, obj, indent, c_list, name=''): INDENT = ' ' c_list = [] +", 
"field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list,", "msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name': name,", "import loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE =", "\"\\\"%s\\\"\" % item try: print \"%s%s: %s\" % ( indent * INDENT, name,", "return check_default content = filter filter_obj = None import os if os.path.isfile(filter): try:", "# print msg print \"failed to pretty print the paylod. wrong message structure?\"", "failed in handle_messages in MessageMap:\" print msg def request_infos(self): for service in self._service_infos:", "default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler in", "service in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if", "except StopIteration: pass except: raise else: ret = [in_string] in_string = \"\".join(ret).lstrip(LF) print", "service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\",", "msg def request_infos(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({", "\"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' %", "= self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg,", "1]' % service }) else: print \"handling of message failed in handle_info in", "tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service }) else: print \"handling of", "MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 
MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON", "tag += 1 def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback,", "the paylod. wrong message structure?\" print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition:", "dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in keys: if isinstance(obj[key], list): if", "= None self._service_infos = None self._map = None self._connection = None self._callback =", "6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1", "0 messages\"\"\" LF = \"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG =", "%d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type):", "* INDENT, name) indent += 1 keys = obj.keys() for key in keys:", "= message_list # the message list can be empty (e.g. for the 'core'", "'%s},' % (indent * INDENT) def pretty_print_message_map(self): print 'message map:' print '{' for", "= {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs,", "def request_host_info(self): for service in self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service]", "return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG])", "parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or []", "the message maps # ======================= def get_msg(self, list, id): MSG_ID = 0 for", "service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD:", "indent_count * INDENT, m.group()]) last_match = 
CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]:", "if enums and len(enums) == 3: for enum in enums[2]: dict[enum[1]] = enum[0]", "10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT = \" \" filter =", "definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent *", "\"handling of message failed in handle_messages in MessageMap:\" print msg def finalize(self): if", "tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT:", "for sub_item in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug)", "None self._connection = None self._callback = None def check_map_complete(self, prop): for service in", "in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs,", "eval(code) except: print \"parsing the specified filter failed\" print \"parsed filter:\", filter_obj if", "definition: print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value", "item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def =", "1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT,", "c_list: print '%s\"message\": <circular reference>,' % ( indent * INDENT) continue else: c_list.append(obj['message_name'])", "None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info),", "\"\", in_string) ret = 
[] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try:", "None self._map = None self._connection = None self._callback = None def check_map_complete(self, prop):", "print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos", "return check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith return check_default content =", "'[\"%s\"]' % service }) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service", "message maps # ========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT = '", "format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if", "\", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg", "name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST", "= 1 MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH = 50 class", "= parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or", "if not MessageMap.filter or check_message(service, command_name, message_type): print prelude if format: print \"", "filter, 'eval') filter_obj = eval(code) except: print \"parsing the specified filter failed\" print", "filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod", "name, definition, item, verbose_debug=False): if item and \"message\" in definition: print \"%s%s:\" %", "obj, indent, c_list, name=''): INDENT = ' ' c_list = [] + c_list", "to create a description map of all messages to be used to pretty", "create_check(check): def check_default(in_str): return check == in_str def check_endswith(in_str): return 
in_str.endswith(check) def check_startswith(in_str):", "% msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type): print", "MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT", "= { 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE:", "'}' def quote(self, value): return isinstance(value, str) and '\"%s\"' % value or value", "matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match ==", "indent += 1 keys = obj.keys() for key in keys: if not (isinstance(obj[key],", "\" \" filter = None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return", "self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs,", "indent + 1, c_list) print '%s],' % (indent * INDENT) else: print '%s%s:", "if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print", "%s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key],", "if item and \"message\" in definition: print \"%s%s:\" % (indent * INDENT, name)", "= tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON,", "self._map = None self._connection = None self._callback = None def check_map_complete(self, prop): for", "= self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs,", "= 4 print prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", 
in_string)", "= 1 self._tags = {} def _get_empty_tag(self): tag = 1 while True: if", "= self._get_empty_tag() self._tags[tag] = (callback, args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG]", "= 2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST = 0 EVENT_LIST =", "name) indent += 1 keys = obj.keys() for key in keys: if not", "%s\" % (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print", "command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\",", "not tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler in MessageMap:\" print msg", "MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def handle_enums(self, msg, service):", "message failed in handle_messages in MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map()", "failed\" print \"parsed filter:\", filter_obj if filter_obj: for service in filter_obj: for type", "'[\"%s\", [], 1]' % service }) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS]", "filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service,", "print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service,", "name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE", "INDENT, m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match =", "enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, [])", "in handle_messages in MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback()", 
"request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND,", "format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count =", "parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name, numbers", "NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST", "= None self._connection = None self._callback = None def check_map_complete(self, prop): for service", "for key in keys: if isinstance(obj[key], list): if key == \"message\": if obj['message_name']", "Command Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME = 0 NUMBER =", "= self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) -", "be empty (e.g. for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True", "def get_cmd_name(service, cmd_id): name = None if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\")", "__init__(self): self._counter = 1 self._tags = {} def _get_empty_tag(self): tag = 1 while", "obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' % ( indent * INDENT) continue", "=========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0 messages\"\"\" LF =", "# =========================== def request_host_info(self): for service in self._services: if not service.startswith('core-') and not", "msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field)", "self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: 
MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]'", "msg): if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True", "def quote(self, value): return isinstance(value, str) and '\"%s\"' % value or value #", "wrong message structure?\" print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\" %", "re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string)", "msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST]", ">= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID", "INDENT, m.group()]) indent_count += 1 except StopIteration: pass except: raise else: ret =", "parse_json\" return payload try: from json import loads as parse_json except: globals()['parse_json'] =", "else: print msg def check_message(service, command, message_type): if MessageMap.filter and service in MessageMap.filter", "(e.g. 
for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'):", "pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print msg print \"failed to pretty", "command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] =", "def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list =", "MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name", "========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT = ' ' c_list =", "% (indent * INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict):", "self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\",", "\" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG", "= { 0: \"required\", 1: \"optional\", 2: \"repeated\" } if msg: for field", "command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID])", "parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try:", "and len(enums) == 3: for enum in enums[2]: dict[enum[1]] = enum[0] return name,", "STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and", "\" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print", "try: while True: m = 
matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -=", "list)): print '%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for key in", "def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list =", "3 # Command MessageInfo MSG_LIST = 0 MSG_ID = 0 map = self._map[service]", "enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print", "message type:\", message_type print \" service:\", service print \" command:\", command_name print \"", "0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID = 4", "OPENING_TAG = 4 print prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\",", "last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1 except StopIteration:", "1, 1]' % service }) else: print \"handling of message failed in handle_info", "return name or cmd_id def __init__(self, services, connection, callback, context, map=message_map): self._services =", "check_default(in_str): return check == in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check)", "return in_str.startswith(check) def check_pass(in_str): return True if check in \"*\": return check_pass if", "obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],' % (indent * INDENT) else:", "= field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default'", "self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]'", "1 print '%s},' % (indent * INDENT) def pretty_print_message_map(self): print 
'message map:' print", "c_list if name: name = '%s: ' % self.quote(name) print '%s%s{' % (indent", "= versions[1] if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print \"getting host", "command_name = command_def and command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type", "check.strip('*') return check_endswith return check_default content = filter filter_obj = None import os", "0: \"required\", 1: \"optional\", 2: \"repeated\" } if msg: for field in msg[FIELD_LIST]:", "command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None) or \\", "and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg:", "msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]:", "self.request_infos() else: print \"handling of message failed in handle_messages in MessageMap:\" print msg", "msg, msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME =", "command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND]", "msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message'] =", "1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5", ">= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name,", "} if msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = {", "_parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3", "% service }) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service in", "print \"failed evaling message in 
parse_json\" return payload try: from json import loads", "+ 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item,", "\"required\", 1: \"optional\", 2: \"repeated\" } if msg: for field in msg[FIELD_LIST]: name", "field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service):", "{ 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND,", "self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print \"getting host info failed\" def", "1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude if", "msgs, {}, enums, []) # ========================= # pretty print message maps # =========================", "definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print", "parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if service[0] == \"scope\": versions =", "handle_messages in MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services", "[] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID and", "indent, c_list, key) for key in keys: if isinstance(obj[key], list): if key ==", "not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and", "self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of message failed", "else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG 
elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\"", "[] + c_list if name: name = '%s: ' % self.quote(name) print '%s%s{'", "= self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager = TagManager() class MessageMap(object):", "self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages from scope # =========================== def", "service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None }", "def request_infos(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE:", "if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE]", "enums[1] dict = {} if enums and len(enums) == 3: for enum in", "= eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in parse_json\" return payload try:", "check.startswith('*'): check = check.strip('*') return check_endswith return check_default content = filter filter_obj =", "check = check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith return", "os if os.path.isfile(filter): try: file = open(filter, 'rb') content = file.read() file.close() except:", "payload:\" if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug)", "unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH]", "if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' % ( indent * INDENT)", "def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in zip(payload, definitions): if definition[\"q\"]", "'[]' }) def 
handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info:", "scope # =========================== def request_host_info(self): for service in self._services: if not service.startswith('core-') and", "ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or", "in MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services =", "self.request_infos() else: print \"getting host info failed\" def request_enums(self): for service in self._service_infos:", "2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST = 0 EVENT_LIST = 1", "[]) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if", "else: value = \"\\\"%s\\\"\" % item try: print \"%s%s: %s\" % ( indent", "command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE]", "print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if", "MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] ==", "indent_count -= 1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count", "msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def", "self.finalize() else: print \"handling of message failed in handle_messages in MessageMap:\" print msg", "name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if \"enum\" in definition: value", "'...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in zip(payload, definitions): if", "INDENT, m.group()]) else: 
last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count +=", "True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of message failed in handle_messages in", "in handle_messages in MessageMap:\" print msg def request_infos(self): for service in self._service_infos: tag", "def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None if message_map:", "print the paylod. wrong message structure?\" print \"%spayload: %s\" % (INDENT, payload) print", "MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and", "if host_info: for service in host_info[5]: if service[0] == \"scope\": versions = map(int,", "\"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item", "= 3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP = { 0: \"required\",", "self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME]", "handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD])", "definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name']) for", "self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None self._map = None self._connection =", "\" message type:\", message_type print \" service:\", service print \" command:\", command_name print", "check = check.strip('*') return check_endswith return check_default content = filter filter_obj = None", "type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter", "print msg def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos:", "= _parse_json MSG_KEY_TYPE 
= 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT =", "the keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO =", "event in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg =", "content = filter filter_obj = None import os if os.path.isfile(filter): try: file =", "\"%sdefinition: %s\" % (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else:", "pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if \"enum\" in definition: value =", "for key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key", "dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST =", "(indent * INDENT) def pretty_print_message_map(self): print 'message map:' print '{' for service in", "% (indent * INDENT) def pretty_print_message_map(self): print 'message map:' print '{' for service", "filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except:", "check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith", "field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list,", "ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3", "format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg:", "prelude if format: print \" message type:\", message_type print \" service:\", service print", "in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg", "if not tag in self._tags: return tag tag += 1 def set_callback(self, callback,", "- 
1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj =", "= map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] =", "for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map)", "parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE", "return check == in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def", "msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages']", "last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF,", "% item try: print \"%s%s: %s\" % ( indent * INDENT, name, value)", "% (definition['enum']['numbers'][item], item) elif item == None: value = \"null\" elif isinstance(item, unicode):", "for service in self._map: if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1,", "= self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message'] = []", "self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager = TagManager() class MessageMap(object): \"\"\"", "self._map: if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print", "1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]]", "msg return None def get_enum(self, list, id): enums = self.get_msg(list, id) name =", "item try: print \"%s%s: %s\" % ( indent * INDENT, name, value) 
except:", "\"%s%s: %s%s\" % ( indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions,", "cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in", "'%s],' % (indent * INDENT) else: print '%s%s: [],' % (indent * INDENT,", "messages\"\"\" LF = \"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG = 2", "status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\",", "parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME", "} self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD:", "in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command):", "paylod. 
wrong message structure?\" print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\"", "callback, context, map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos", "print '%s],' % (indent * INDENT) else: print '%s%s: [],' % (indent *", "value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item == None: value =", "matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match", "1]' % service }) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service", "return False return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of", "1 def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args) return", "if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None self._map =", "'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID:", "if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,' % (indent *", "'%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if", "description map of all messages to be used to pretty print the payloads", "evaling message in parse_json\" return payload try: from json import loads as parse_json", "-= 1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count *", "re from common import Singleton from maps import status_map, format_type_map, message_type_map, message_map def", "MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST", "from common import Singleton from maps import status_map, format_type_map, 
message_type_map, message_map def _parse_json(msg):", "name: name = '%s: ' % self.quote(name) print '%s%s{' % (indent * INDENT,", "pretty print STP 0 messages\"\"\" LF = \"\\n\" TEXT = 0 TAG =", "\"\\n\" else: print msg def check_message(service, command, message_type): if MessageMap.filter and service in", "enum[0] return name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME =", "INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],'", "== \"message\": if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' % ( indent", "message_list # the message list can be empty (e.g. for the 'core' service)", "12 INDENT = \" \" filter = None @staticmethod def set_filter(filter): def create_check(check):", "messages to be used to pretty print the payloads by adding the keys", "MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' %", "= filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages from scope", "self.check_map_complete('parsed'): self.finalize() else: print \"handling of message failed in handle_messages in MessageMap:\" print", "service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list:", "- 1) >= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] =", "command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >=", "0 EVENT_LIST = 1 NAME = 0 NUMBER = 1 MESSAGE_ID = 2", "INDENT) def pretty_print_message_map(self): print 'message map:' print '{' for service in self._map: if", 
"self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed in", "message maps # ======================= def get_msg(self, list, id): MSG_ID = 0 for msg", "= parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID])", "ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers':", "self._tags: return tag tag += 1 def set_callback(self, callback, args={}): tag = self._get_empty_tag()", "payload = None try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message", "= None import os if os.path.isfile(filter): try: file = open(filter, 'rb') content =", "if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count", "self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\",", "msg and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message'])", "definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def", "'<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name,", "* INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent,", "MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: 
tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self,", "None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return check == in_str def", "\" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service, command, message_type): if MessageMap.filter", "[],' % (indent * INDENT, self.quote(key)) indent -= 1 print '%s},' % (indent", "MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service", "1 keys = obj.keys() for key in keys: if not (isinstance(obj[key], dict) or", "None def get_enum(self, list, id): enums = self.get_msg(list, id) name = enums[1] dict", "message_type print \" service:\", service print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]]", "map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg,", "= message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None) or \\ '<id:", "self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos()", "INDENT, name, value) except: print \"%s%s: %s%s\" % ( indent * INDENT, name,", "parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or", "structure?\" print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition)", "event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) # ========================= # pretty print", "== OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG elif", "% (indent * INDENT, 
self.quote(key)) indent -= 1 print '%s},' % (indent *", "id): enums = self.get_msg(list, id) name = enums[1] dict = {} if enums", "MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service }) else: print \"handling of message", "for service in self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = {", "= event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, [])", "\"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def", "key) for key in keys: if isinstance(obj[key], list): if key == \"message\": if", "= check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith return check_default", "# get the messages from scope # =========================== def request_host_info(self): for service in", "(indent * INDENT, self.quote(key)) indent -= 1 print '%s},' % (indent * INDENT)", "try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print msg print \"failed to", "MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def handle_enums(self,", "filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj", "definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD],", "service) print '}' def quote(self, value): return isinstance(value, str) and '\"%s\"' % value", "'[\"%s\", [], 1, 1]' % service }) else: print \"handling of message failed", "message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type): print prelude if", "= True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of 
message failed in handle_messages", "tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG:", "3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7", "@staticmethod def get_cmd_name(service, cmd_id): name = None if message_map: name = message_map.get(service, {}).get(int(cmd_id),", "except: print \"%s%s: %s%s\" % ( indent * INDENT, name, value[0:100], '...') def", "class MessageMap(object): \"\"\" to create a description map of all messages to be", "TEXT = 0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG", "keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11", "handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in", "_get_empty_tag(self): tag = 1 while True: if not tag in self._tags: return tag", "= None self._callback = None def check_map_complete(self, prop): for service in self._service_infos: if", "{ 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) - 1) >=", "{}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None) or \\ '<id: %d>' %", "= 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND =", "\"failed to pretty print the paylod. 
wrong message structure?\" print \"%spayload: %s\" %", "self._connection = connection self._callback = callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(','))", "'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG:", "\"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service", "= message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self, services, connection, callback,", "service }) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos:", "= enums[1] dict = {} if enums and len(enums) == 3: for enum", "verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service,", "empty (e.g. 
for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if", "self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling", "=========================== def request_host_info(self): for service in self._services: if not service.startswith('core-') and not service.startswith('stp-'):", "parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg =", "while True: if not tag in self._tags: return tag tag += 1 def", ">= 1: self.request_enums() else: self.request_infos() else: print \"getting host info failed\" def request_enums(self):", "ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE = 1", "* INDENT) def pretty_print_message_map(self): print 'message map:' print '{' for service in self._map:", "NAME = 0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID = 3 #", "FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and", "\"handling of message failed in handle_info in MessageMap:\" print msg def handle_messages(self, msg,", "= 50 class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags = {} def", "args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args) return tag def handle_message(self, msg):", "message in parse_json\" return payload try: from json import loads as parse_json except:", "= None try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in", "= 1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER =", "sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format,", "'core' service) if message_list: self.parse_raw_lists(service) 
self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print", "try: print \"%s%s: %s\" % ( indent * INDENT, name, value) except: print", "msg # ======================= # create the message maps # ======================= def get_msg(self, list,", "% (indent * INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent + 1,", "= connection self._callback = callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler)", "= 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT = \" \" filter", "value = \"null\" elif isinstance(item, unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH:", "\"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg):", "'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) - 1) >= FIELD_Q", "command in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg =", "print \"%s%s: %s%s\" % ( indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload,", "* INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if \"enum\" in", "'%s%s{' % (indent * INDENT, name) indent += 1 keys = obj.keys() for", "service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] =", "if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if", "format): \"\"\"To pretty print STP 0 messages\"\"\" LF = \"\\n\" TEXT = 0", "\"message\": if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' % ( indent *", "= 
1 while True: if not tag in self._tags: return tag tag +=", "return isinstance(value, str) and '\"%s\"' % value or value # =========================== # pretty", "1) >= FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name']", "# pretty print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty", "[]) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in", "get_enum(self, list, id): enums = self.get_msg(list, id) name = enums[1] dict = {}", "def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID],", "event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) #", "verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and", "OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude if format: if in_string.startswith(\"<\"): in_string", "Q_MAP = { 0: \"required\", 1: \"optional\", 2: \"repeated\" } if msg: for", "MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info", "the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else:", "MessageMap:\" print msg def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None", "name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) - 1) >= FIELD_Q and", "self.get_msg(msg_list, 
field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg,", "if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\"", "= 2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST = 0 MSG_ID =", "{}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self, services, connection, callback, context, map=message_map):", "command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command in", "LF = \"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG", "self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self, value): return isinstance(value, str)", "{} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {},", "MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND", "{}).get(\"name\") return name or cmd_id def __init__(self, services, connection, callback, context, map=message_map): self._services", "= self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command in command_list:", "( indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for", "{} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {},", "def create_check(check): def check_default(in_str): return check == in_str def check_endswith(in_str): return in_str.endswith(check) def", "message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if 
check(command): return True return False", "filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None", "3 OPENING_TAG = 4 print prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\",", "payload try: from json import loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE", "check_pass if check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*')", "* INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent *", "= self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] =", "field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj)", "filter_obj: for service in filter_obj: for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check)", "value = \"\\\"%s\\\"\" % item try: print \"%s%s: %s\" % ( indent *", "1, [], service) print '}' def quote(self, value): return isinstance(value, str) and '\"%s\"'", "MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, msg, service):", "or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self, value):", "or check_message(service, command_name, message_type): print prelude if format: print \" message type:\", message_type", "return True return False # =========================== # pretty print STP/0 messages # ===========================", "field_obj['message']) if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums,", "self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ 
MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO,", "(callback, args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args", "self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO,", "if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID])", "elif item == None: value = \"null\" elif isinstance(item, unicode): if not verbose_debug", "format: print \" message type:\", message_type print \" service:\", service print \" command:\",", "@staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None if", "enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of", "MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \"", "if check.startswith('*'): check = check.strip('*') return check_endswith return check_default content = filter filter_obj", "values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12", "# ======================= # create the message maps # ======================= def get_msg(self, list, id):", "code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except: print \"parsing the", "m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG", "5 Q_MAP = { 0: \"required\", 1: \"optional\", 2: \"repeated\" } if msg:", "matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF,", "return msg return None def 
get_enum(self, list, id): enums = self.get_msg(list, id) name", "[create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map(): return", "prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = []", "MessageMap.filter[service][message_type]: if check(command): return True return False # =========================== # pretty print STP/0", "field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list,", "check in \"*\": return check_pass if check.endswith('*'): check = check.strip('*') return check_startswith if", "MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR", "MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]:", "for service in self._service_infos: if not self._service_infos[service][prop]: return False return True def default_msg_handler(self,", "= 3 # Command MessageInfo MSG_LIST = 0 MSG_ID = 0 map =", "versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print", "for event in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg", "return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT =", "item and \"message\" in definition: print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item,", "message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of message", "globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT", 
"not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list #", "MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \"", "m = matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT]", "{}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums,", "self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None self._map = None", "}) else: print \"handling of message failed in handle_info in MessageMap:\" print msg", "= 1 NAME = 0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID =", "MessageMap:\" print msg def request_infos(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\":", "for check in MessageMap.filter[service][message_type]: if check(command): return True return False # =========================== #", "host info failed\" def request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service':", "in_string) try: while True: m = matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count", "msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE],", "MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST = 0 EVENT_LIST", "{}, enums, []) # ========================= # pretty print message maps # ========================= def", "filter = None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return check ==", "print '}' def quote(self, value): return isinstance(value, str) and '\"%s\"' % value or", "[]) # ========================= # pretty print message maps # ========================= def 
pretty_print_object(self, obj,", "# ======================= def get_msg(self, list, id): MSG_ID = 0 for msg in list:", "import Singleton from maps import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload =", "if check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*') return", "INDENT, name) indent += 1 keys = obj.keys() for key in keys: if", "parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command", "\"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service, command,", "self._service_infos = None self._map = None self._connection = None self._callback = None def", "% (indent * INDENT, name) indent += 1 keys = obj.keys() for key", "MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]'", "and '\"%s\"' % value or value # =========================== # pretty print STP/1 messages", "FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q", "CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude if format:", "format_type_map, message_type_map, message_map def _parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\", \",None\"))", "value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in zip(payload, definitions):", "in enums[2]: dict[enum[1]] = enum[0] return name, dict def parse_msg(self, msg, msg_list, parsed_list,", "not verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value", "cmd_id): name = None if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name", "MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, 
msg, service): if not", "0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next() matches =", "not MessageMap.filter or check_message(service, command_name, message_type): print prelude if format: print \" message", "obj.keys() for key in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print", "for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]] )", "if check in \"*\": return check_pass if check.endswith('*'): check = check.strip('*') return check_startswith", "the message list can be empty (e.g. for the 'core' service) if message_list:", "True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed in handle_messages in", "msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) # =========================", "handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD])", "isinstance(value, str) and '\"%s\"' % value or value # =========================== # pretty print", "check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True if check", "self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos = {} self._map = map self._connection", "get_msg(self, list, id): MSG_ID = 0 for msg in list: if msg[MSG_ID] ==", "def check_message(service, command, message_type): if MessageMap.filter and service in MessageMap.filter and \\ message_type", "# ========================= # pretty print message maps # ========================= def pretty_print_object(self, obj, indent,", "1 NAME = 0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID = 3", "\"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3", "* INDENT, m.group()]) 
indent_count += 1 except StopIteration: pass except: raise else: ret", "# pretty print message maps # ========================= def pretty_print_object(self, obj, indent, c_list, name=''):", "\"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try: print \"%s%s: %s\"", "======================= def get_msg(self, list, id): MSG_ID = 0 for msg in list: if", "1 self._tags = {} def _get_empty_tag(self): tag = 1 while True: if not", "1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST = 0", "1: \"optional\", 2: \"repeated\" } if msg: for field in msg[FIELD_LIST]: name =", "prop): for service in self._service_infos: if not self._service_infos[service][prop]: return False return True def", "c_list) print '%s],' % (indent * INDENT) else: print '%s%s: [],' % (indent", "and \"message\" in definition: print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"],", "tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler in MessageMap:\" print msg #", "== id: return msg return None def get_enum(self, list, id): enums = self.get_msg(list,", "field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >=", "== \"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name']) for sub_item in item:", "= tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON,", "\"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code)", "not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] =", "self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if 
command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service':", "def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and \"message\" in definition: print", "1: self.request_enums() else: self.request_infos() else: print \"getting host info failed\" def request_enums(self): for", "indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent", "msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not", "pretty print the paylod. wrong message structure?\" print \"%spayload: %s\" % (INDENT, payload)", "except: print \"parsing the specified filter failed\" print \"parsed filter:\", filter_obj if filter_obj:", "can be empty (e.g. for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] =", "1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH =", "self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def", "# Command MessageInfo MSG_LIST = 0 MSG_ID = 0 map = self._map[service] =", "indent -= 1 print '%s},' % (indent * INDENT) def pretty_print_message_map(self): print 'message", "= compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except: print \"parsing the specified", "pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in zip(payload, definitions): if definition[\"q\"] ==", "= 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS =", "check_message(service, command_name, message_type): print prelude if format: print \" message type:\", message_type print", "msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload", "isinstance(obj[key], list): if key == \"message\": if 
obj['message_name'] in c_list: print '%s\"message\": <circular", "( indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' %", "in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list", "all messages to be used to pretty print the payloads by adding the", "INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list,", "+ 1, c_list) print '%s],' % (indent * INDENT) else: print '%s%s: [],'", "self._counter = 1 self._tags = {} def _get_empty_tag(self): tag = 1 while True:", "in self._tags: return tag tag += 1 def set_callback(self, callback, args={}): tag =", "__init__(self, services, connection, callback, context, map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version", "if key == \"message\": if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' %", "MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def: definition =", "list can be empty (e.g. 
for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed']", "ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in", "item) elif item == None: value = \"null\" elif isinstance(item, unicode): if not", "'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE =", "(indent * INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent + 1, c_list)", "}) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for", "in keys: if isinstance(obj[key], list): if key == \"message\": if obj['message_name'] in c_list:", "(indent * INDENT, self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key],", "None) command_name = command_def and command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID]", "MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags = {}", "MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' %", "failed\" def request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({", "pretty print the payloads by adding the keys to all values\"\"\" COMMAND_INFO =", "MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]'", "= \"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG =", "or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of 
message", "tag = self._get_empty_tag() self._tags[tag] = (callback, args) return tag def handle_message(self, msg): if", "in_string) ret = [] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while", "and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\"", "(INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD]", "* INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print", "maps import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload = None try: payload", "value # =========================== # pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent, name,", "\"handling of message failed in default_msg_handler in MessageMap:\" print msg # ======================= #", "msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if service[0] ==", "indent, c_list, name=''): INDENT = ' ' c_list = [] + c_list if", "if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed in handle_messages in MessageMap:\"", "in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID:", "of message failed in handle_messages in MessageMap:\" print msg def request_infos(self): for service", "Command MessageInfo MSG_LIST = 0 MSG_ID = 0 map = self._map[service] = {}", "field[FIELD_TYPE], } if (len(field) - 1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]]", "[]) for command in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME]", "return True return False tag_manager = TagManager() class 
MessageMap(object): \"\"\" to create a", "service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD:", "verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload,", "check == in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str):", "= filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name =", "MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT = \" \"", "enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True", "(len(field) - 1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) -", "= OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count", "self._service_infos: if not self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg): if not", "of message failed in handle_info in MessageMap:\" print msg def handle_messages(self, msg, service):", "= '%s: ' % self.quote(name) print '%s%s{' % (indent * INDENT, name) indent", "[' % (indent * INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent +", "msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if", "self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],' %", "= versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: 
self.request_enums() else: self.request_infos() else:", "or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match =", "= context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the", "MESSAGE_ID = 2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST = 0 MSG_ID", "in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count = 0 matches_iter", "return tag tag += 1 def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag]", "OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG]", "args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback, args =", "in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID:", "= 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID =", "None try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in parse_json\"", "self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]:", "( indent * INDENT, name, value) except: print \"%s%s: %s%s\" % ( indent", "print \"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj =", "= self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) # ========================= #", "print \"handling of message failed in handle_info in MessageMap:\" print msg def handle_messages(self,", "'parsed': False, 
'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE:", "== in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return", "pretty_print_message_map(self): print 'message map:' print '{' for service in self._map: if not self._print_map_services", "in MessageMap:\" print msg # ======================= # create the message maps # =======================", "\"getting host info failed\" def request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums,", "message failed in default_msg_handler in MessageMap:\" print msg # ======================= # create the", "MessageMap:\" print msg # ======================= # create the message maps # ======================= def", "event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT]", "Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if name in parsed_list:", "(indent * INDENT, name) indent += 1 keys = obj.keys() for key in", "50 class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags = {} def _get_empty_tag(self):", "self.quote(name) print '%s%s{' % (indent * INDENT, name) indent += 1 keys =", "in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent * INDENT,", "service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos': None,", "dict) or isinstance(obj[key], list)): print '%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key]))", "{'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: 
MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag,", "{'name': name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1", "service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 # Command Info", "pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None)", "{ 0: \"required\", 1: \"optional\", 2: \"repeated\" } if msg: for field in", "for enum in enums[2]: dict[enum[1]] = enum[0] return name, dict def parse_msg(self, msg,", "INDENT = ' ' c_list = [] + c_list if name: name =", "services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos = {} self._map = map", "if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else:", "list): if key == \"message\": if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,'", "if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums']", "c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent * INDENT, self.quote(key)) for item", "= {} def _get_empty_tag(self): tag = 1 while True: if not tag in", "= filter filter_obj = None import os if os.path.isfile(filter): try: file = open(filter,", "in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command): return True return False #", "= msg and msg[1] or 'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums,", "c_list = [] + c_list if name: name = '%s: ' % self.quote(name)", "indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()])", "self.pretty_print_object(item, indent + 1, c_list) print '%s],' % (indent * INDENT) else: print", "callback(msg, **args) return 
True return False tag_manager = TagManager() class MessageMap(object): \"\"\" to", "= 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH", "MessageMap(object): \"\"\" to create a description map of all messages to be used", "True if check in \"*\": return check_pass if check.endswith('*'): check = check.strip('*') return", "self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', [])", "if MessageMap.filter and service in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check", "'default' field_obj['message'] = [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1)", "msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos'])", "if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count = 0", "2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6", "indent=2, verbose_debug=False): for item, definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print", "else: print \"handling of message failed in handle_info in MessageMap:\" print msg def", "# the message list can be empty (e.g. 
for the 'core' service) if", "eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in parse_json\" return payload try: from", "= {} if enums and len(enums) == 3: for enum in enums[2]: dict[enum[1]]", "def pretty_print_message_map(self): print 'message map:' print '{' for service in self._map: if not", "and command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if", "create the message maps # ======================= def get_msg(self, list, id): MSG_ID = 0", "8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT = \"", "= \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try: print \"%s%s:", "(len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message'] =", "= ( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def", "= tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON,", "\"\"\"To pretty print STP 0 messages\"\"\" LF = \"\\n\" TEXT = 0 TAG", "msg): if not tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler in MessageMap:\"", "name=''): INDENT = ' ' c_list = [] + c_list if name: name", "not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}' def", "value or value # =========================== # pretty print STP/1 messages # =========================== def", "= 1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID =", "True return False tag_manager = TagManager() class MessageMap(object): \"\"\" to create a description", "self.quote(key), self.quote(obj[key])) for key in keys: if isinstance(obj[key], dict): 
self.pretty_print_object(obj[key], indent, c_list, key)", "msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID]", "% (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \"", "self._service_infos[service]['raw_messages'] = message_list # the message list can be empty (e.g. for the", "check_pass(in_str): return True if check in \"*\": return check_pass if check.endswith('*'): check =", "= 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude if format: if", "list, id): MSG_ID = 0 for msg in list: if msg[MSG_ID] == id:", "def finalize(self): if self._print_map: self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None", "if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print", "item in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],' % (indent *", "if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print \"getting host info failed\"", "name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in zip(payload,", "for service in filter_obj: for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for", "return check_endswith return check_default content = filter filter_obj = None import os if", "# create the message maps # ======================= def get_msg(self, list, id): MSG_ID =", "{\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag,", "0 map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums", "print msg def request_infos(self): 
for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, {\"service\": service})", "status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in", "= 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD =", "% (INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition) else: print \" \",", "\" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID", "message_type): print prelude if format: print \" message type:\", message_type print \" service:\",", "msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list", "FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID", "MSG_ID = 0 for msg in list: if msg[MSG_ID] == id: return msg", "= 0 map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST]", "ENUM_ID = 5 Q_MAP = { 0: \"required\", 1: \"optional\", 2: \"repeated\" }", "* INDENT) else: print '%s%s: [],' % (indent * INDENT, self.quote(key)) indent -=", "== 3: for enum in enums[2]: dict[enum[1]] = enum[0] return name, dict def", "map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos = {}", "if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name']) for sub_item", "MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self,", "service }) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos:", "in host_info[5]: if service[0] == \"scope\": versions = map(int, 
service[1].split('.')) self.scope_major_version = versions[0]", "{} def _get_empty_tag(self): tag = 1 while True: if not tag in self._tags:", "in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR:", "enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0]", "= [] + c_list if name: name = '%s: ' % self.quote(name) print", "field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND", "format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name =", "INDENT) else: print '%s%s: [],' % (indent * INDENT, self.quote(key)) indent -= 1", "request_host_info(self): for service in self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] =", "= self.get_msg(list, id) name = enums[1] dict = {} if enums and len(enums)", "True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of message failed in", "2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST = 0 MSG_ID = 0", "Exception, msg: # print msg print \"failed to pretty print the paylod. 
wrong", "MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command): return", "from maps import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload = None try:", "= obj.keys() for key in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)):", "dict[enum[1]] = enum[0] return name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret):", "parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name']", "has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None if message_map: name", "command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print msg print", "except Exception, msg: # print msg print \"failed to pretty print the paylod.", "FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID", "if self.check_map_complete('parsed'): self.finalize() else: print \"handling of message failed in handle_messages in MessageMap:\"", "for service in host_info[5]: if service[0] == \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version", "= open(filter, 'rb') content = file.read() file.close() except: print \"reading filter failed\" try:", "in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager", "INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition in", "in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in keys:", "isinstance(obj[key], list)): print '%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for key", "# 
Command Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME = 0 NUMBER", "msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg =", "# =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0 messages\"\"\" LF", "= 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID =", "set_filter(filter): def create_check(check): def check_default(in_str): return check == in_str def check_endswith(in_str): return in_str.endswith(check)", "command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg", "enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event", "try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling message in parse_json\" return", "= {} self._map = map self._connection = connection self._callback = callback self._print_map =", "in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,' %", "'\\n'), filter, 'eval') filter_obj = eval(code) except: print \"parsing the specified filter failed\"", "= file.read() file.close() except: print \"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'),", "service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list", "print '%s%s: %s,' % (indent * INDENT, self.quote(key), self.quote(obj[key])) for key in keys:", "(indent * INDENT) else: print '%s%s: [],' % (indent * INDENT, self.quote(key)) indent", "if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self,", "field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if", "- 
1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1)", "value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try: print", "json import loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE", "'type': field[FIELD_TYPE], } if (len(field) - 1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] =", "' % self.quote(name) print '%s%s{' % (indent * INDENT, name) indent += 1", "' c_list = [] + c_list if name: name = '%s: ' %", "keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in keys: if", "self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service })", "msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {},", "1, c_list) print '%s],' % (indent * INDENT) else: print '%s%s: [],' %", "payload) print \"%sdefinition: %s\" % (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print", "connection, callback, context, map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version = 0", "MessageMap.filter or check_message(service, command_name, message_type): print prelude if format: print \" message type:\",", "self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] = self.parse_msg(msg, msgs, {}, enums, []) # ========================= # pretty", "print \"%spayload: %s\" % (INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition) else:", "m.group()]) indent_count += 1 except StopIteration: pass except: raise else: ret = [in_string]", "and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers}", "COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT = \" \" filter = None", "print \" payload:\" if payload and command_def: definition = 
command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload,", "m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1", "self._get_empty_tag() self._tags[tag] = (callback, args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in", "return True if check in \"*\": return check_pass if check.endswith('*'): check = check.strip('*')", "in MessageMap.filter[service][message_type]: if check(command): return True return False # =========================== # pretty print", "a description map of all messages to be used to pretty print the", "= 5 Q_MAP = { 0: \"required\", 1: \"optional\", 2: \"repeated\" } if", "* INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"),", "MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG", "pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if", "MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service })", "reference>,' % ( indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s:", "compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except: print \"parsing the specified filter", "in_str.startswith(check) def check_pass(in_str): return True if check in \"*\": return check_pass if check.endswith('*'):", "(len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum']", "not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos':", "indent_count += 1 except StopIteration: pass except: raise else: ret = [in_string] in_string", "failed\" try: code = 
compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj = eval(code) except: print", "check in MessageMap.filter[service][message_type]: if check(command): return True return False # =========================== # pretty", "print \"getting host info failed\" def request_enums(self): for service in self._service_infos: tag =", "else: last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1 except", "\"scope\": versions = map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version", "(definition['enum']['numbers'][item], item) elif item == None: value = \"null\" elif isinstance(item, unicode): if", "= map self._connection = connection self._callback = callback self._print_map = context.print_message_map self._print_map_services =", "message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list can be empty", "tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS]", "field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1] or 'default' field_obj['message']", "map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums =", "payloads by adding the keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO =", "1 while True: if not tag in self._tags: return tag tag += 1", "= 0 self.scope_minor_version = 0 self._service_infos = {} self._map = map self._connection =", "and service in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]:", "def check_pass(in_str): return True if check in \"*\": return check_pass if check.endswith('*'): check", "get the messages from scope # =========================== def 
request_host_info(self): for service in self._services:", "and \\ message_type in MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command): return True", "elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT,", "message_type_map, message_map def _parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\", \",None\")) except:", "* INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item, definition", "} if (len(field) - 1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if", "Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME = 0 NUMBER = 1", "0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4", "payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE],", "value = item if \"enum\" in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item],", "key in keys: if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in", "message failed in handle_info in MessageMap:\" print msg def handle_messages(self, msg, service): if", "elif isinstance(item, unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\"", "INDENT) continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent * INDENT,", "service in self._map: if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [],", "in obj[key]: self.pretty_print_object(item, indent + 1, c_list) print '%s],' % (indent * INDENT)", "@staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return check == in_str def check_endswith(in_str):", "* INDENT, self.quote(key)) indent -= 1 print '%s},' % (indent 
* INDENT) def", "not self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print", "(indent * INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\",", "None import os if os.path.isfile(filter): try: file = open(filter, 'rb') content = file.read()", "msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None) or", "COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT = \" \"", ">= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]] =", "* INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count", "self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, []) msg = self.get_msg(msgs, command[RESPONSE_ID])", "tag in self._tags: return tag tag += 1 def set_callback(self, callback, args={}): tag", "INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value = item if \"enum\" in definition:", "callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== #", "field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if name", "\"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name']) for sub_item in item: pretty_print_payload_item(", "-= 1 print '%s},' % (indent * INDENT) def pretty_print_message_map(self): print 'message map:'", "STP 0 messages\"\"\" LF = \"\\n\" TEXT = 0 TAG = 1 CLOSING_TAG", "MessageInfo MSG_LIST = 0 MSG_ID = 0 map = 
self._map[service] = {} command_list", "print '%s\"message\": <circular reference>,' % ( indent * INDENT) continue else: c_list.append(obj['message_name']) if", "= message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type): print prelude if format:", "= 0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID =", "if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list", "\" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID", "self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return ret def parse_raw_lists(self,", "service in host_info[5]: if service[0] == \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version =", "str) and '\"%s\"' % value or value # =========================== # pretty print STP/1", "in filter_obj: for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in", "def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args) return tag", "if isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in keys: if isinstance(obj[key],", "COMMAND_ENUM_INFO = 12 INDENT = \" \" filter = None @staticmethod def set_filter(filter):", "print msg def check_message(service, command, message_type): if MessageMap.filter and service in MessageMap.filter and", "MSG_LIST = 0 MSG_ID = 0 map = self._map[service] = {} command_list =", "self._services = None self._service_infos = None self._map = None self._connection = None self._callback", "\" tag:\", msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD])", "MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD", "= 
parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({", "1 FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID = 5", "indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition,", "if msg[MSG_ID] == id: return msg return None def get_enum(self, list, id): enums", "if obj[key]: print '%s%s: [' % (indent * INDENT, self.quote(key)) for item in", "\" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \"", "indent=indent+1) else: value = item if \"enum\" in definition: value = \"%s (%s)\"", "host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if service[0] == \"scope\":", "\",None\")) except: print \"failed evaling message in parse_json\" return payload try: from json", "check_map_complete(self, prop): for service in self._service_infos: if not self._service_infos[service][prop]: return False return True", "service in self._service_infos: if not self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg):", "in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\",", "except: print \"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter, 'eval') filter_obj", "= 1 MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG =", "connection self._callback = callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info()", "2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q = 3", "filter(bool, 
context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages from scope #", "matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next() matches = m.groups()", "enum in enums[2]: dict[enum[1]] = enum[0] return name, dict def parse_msg(self, msg, msg_list,", "and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums']", "uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg: print \" tag:\", msg[MSG_KEY_TAG] if format_payload and", "== MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def: definition", "TagManager(Singleton): def __init__(self): self._counter = 1 self._tags = {} def _get_empty_tag(self): tag =", "}) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list", "{}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for", "MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service }) def handle_info(self, msg, service): if", "indent * INDENT, name, value) except: print \"%s%s: %s%s\" % ( indent *", "command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO,", "name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self, services, connection,", "self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj", 
"[] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m =", "enums = self.get_msg(list, id) name = enums[1] dict = {} if enums and", "item, verbose_debug=False): if item and \"message\" in definition: print \"%s%s:\" % (indent *", "definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg,", "event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]] = {} event_obj['name']", "service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] =", "else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg and msg[1]", "Singleton from maps import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload = None", "definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent * INDENT, definition['name']) for sub_item in", "enums and len(enums) == 3: for enum in enums[2]: dict[enum[1]] = enum[0] return", "msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if", "as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID", "message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\", None) or \\ '<id: %d>'", "if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] =", "'%s%s: [],' % (indent * INDENT, self.quote(key)) indent -= 1 print '%s},' %", "the messages from scope # =========================== def request_host_info(self): for service in 
self._services: if", "\"%s%s: %s\" % ( indent * INDENT, name, value) except: print \"%s%s: %s%s\"", "ret = [] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True:", "service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not", "if not tag_manager.handle_message(msg): print \"handling of message failed in default_msg_handler in MessageMap:\" print", "continue else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent * INDENT, self.quote(key))", "= services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos = {} self._map =", "msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None:", "True return False # =========================== # pretty print STP/0 messages # =========================== def", "msg in list: if msg[MSG_ID] == id: return msg return None def get_enum(self,", "MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service", "return name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME = 1", "check_default content = filter filter_obj = None import os if os.path.isfile(filter): try: file", "if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service) print '}'", "self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({", "1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug)", "enums = self._service_infos[service].get('raw_enums', []) for command in 
command_list: command_obj = map[command[NUMBER]] = {}", "2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude if format: if in_string.startswith(\"<\"):", "% ( indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False):", "in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match =", "numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return ret", "matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG", "be used to pretty print the payloads by adding the keys to all", "MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1,", "def check_default(in_str): return check == in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return", "= self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return ret def", "last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF,", "# =========================== # pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition,", "adding the keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO", "open(filter, 'rb') content = file.read() file.close() except: print \"reading filter failed\" try: code", "MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID", "MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \"", "field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name': name, 'q': 
'required',", "field[FIELD_NAME] field_obj = { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field)", "= (callback, args) return tag def handle_message(self, msg): if msg[MSG_KEY_TAG] in self._tags: callback,", "= CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count", "message_type): if MessageMap.filter and service in MessageMap.filter and \\ message_type in MessageMap.filter[service]: for", "command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service})", "msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if MSG_KEY_TAG in msg:", "tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD])", "print the payloads by adding the keys to all values\"\"\" COMMAND_INFO = 7", "MSG_TYPE_EVENT = 3 # Command Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME", "msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type): print prelude", "print \" service:\", service print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if", "tag_manager = TagManager() class MessageMap(object): \"\"\" to create a description map of all", "3 # Command Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME = 0", "( [create_check(check) for check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map():", "used to pretty print the payloads by adding the keys to all values\"\"\"", "bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None if message_map: name = message_map.get(service,", "TagManager() class MessageMap(object): \"\"\" to create a 
description map of all messages to", "else: print \"getting host info failed\" def request_enums(self): for service in self._service_infos: tag", "message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def __init__(self, services, connection, callback, context,", "raw_enums, ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME = 0 FIELD_TYPE =", "in self._map: if not self._print_map_services or service in self._print_map_services: self.pretty_print_object(self._map[service], 1, [], service)", "definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item == None: value", "= \"\\\"%s\\\"\" % item try: print \"%s%s: %s\" % ( indent * INDENT,", "check in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod", "%s\" % ( indent * INDENT, name, value) except: print \"%s%s: %s%s\" %", "loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1", "item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try: print \"%s%s: %s\" % (", "7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT = \"", "tag = 1 while True: if not tag in self._tags: return tag tag", "if not self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg):", "check(command): return True return False # =========================== # pretty print STP/0 messages #", "filter_obj = eval(code) except: print \"parsing the specified filter failed\" print \"parsed filter:\",", "indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service =", "obj[key]: print '%s%s: [' % (indent * INDENT, self.quote(key)) for item in obj[key]:", "pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and \"message\" in definition: print 
\"%s%s:\"", "= re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret = [] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\",", "# =========================== # pretty print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format):", "if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list", "= 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR =", "False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\",", "if matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()])", "msg[MSG_ID] == id: return msg return None def get_enum(self, list, id): enums =", "- 1) >= FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message']", "for command in command_list: command_obj = map[command[NUMBER]] = {} command_obj['name'] = command[NAME] msg", "1 MESSAGE_ID = 2 RESPONSE_ID = 3 # Command MessageInfo MSG_LIST = 0", "msgs = self._service_infos[service]['raw_messages'][MSG_LIST] enums = self._service_infos[service].get('raw_enums', []) for command in command_list: command_obj =", "= 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print prelude", "host_info[5]: if service[0] == \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version", "1 except StopIteration: pass except: raise else: ret = [in_string] in_string = \"\".join(ret).lstrip(LF)", "service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print \"handling", "= 4 INDENT = \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def 
__init__(self):", "= 12 INDENT = \" \" filter = None @staticmethod def set_filter(filter): def", "content = file.read() file.close() except: print \"reading filter failed\" try: code = compile(content.replace('\\r\\n',", "print msg print \"failed to pretty print the paylod. wrong message structure?\" print", "self.request_host_info() # =========================== # get the messages from scope # =========================== def request_host_info(self):", "print \"handling of message failed in handle_messages in MessageMap:\" print msg def request_infos(self):", "def check_map_complete(self, prop): for service in self._service_infos: if not self._service_infos[service][prop]: return False return", "# =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and \"message\" in", "definitions, indent=2, verbose_debug=False): for item, definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\":", "self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get", "service in self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed':", "def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT = 3 #", "self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print \"getting", "by adding the keys to all values\"\"\" COMMAND_INFO = 7 COMMAND_HOST_INFO = 10", "print '%s%s: [' % (indent * INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item,", "self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list can be", "if name: name = 
'%s: ' % self.quote(name) print '%s%s{' % (indent *", "= TagManager() class MessageMap(object): \"\"\" to create a description map of all messages", "return None def get_enum(self, list, id): enums = self.get_msg(list, id) name = enums[1]", "context, map=message_map): self._services = services self.scope_major_version = 0 self.scope_minor_version = 0 self._service_infos =", "create a description map of all messages to be used to pretty print", "self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list == None: self._service_infos[service]['raw_enums'] = enum_list and", "keys: if isinstance(obj[key], list): if key == \"message\": if obj['message_name'] in c_list: print", "print \" cid:\", msg[MSG_KEY_CLIENT_ID] if MSG_KEY_UUID in msg: print \" uuid:\", msg[MSG_KEY_UUID] if", "def set_filter(filter): def create_check(check): def check_default(in_str): return check == in_str def check_endswith(in_str): return", "\" service:\", service print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS", "if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message']", "CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count *", "self.pretty_print_message_map() self._connection.clear_msg_handler() self._callback() self._services = None self._service_infos = None self._map = None self._connection", "% service }) else: print \"handling of message failed in handle_info in MessageMap:\"", "maps # ========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT = ' '", "if service[0] == \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version =", "[], 1]' % service }) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and", "'required', 'type': field[FIELD_TYPE], } if (len(field) - 1) >= 
FIELD_Q and field[FIELD_Q]: field_obj['q']", "raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name, numbers =", "TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4 print", "tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT:", "3: for enum in enums[2]: dict[enum[1]] = enum[0] return name, dict def parse_msg(self,", "check.strip('*') return check_startswith if check.startswith('*'): check = check.strip('*') return check_endswith return check_default content", "else: print '%s%s: [],' % (indent * INDENT, self.quote(key)) indent -= 1 print", "[], service) print '}' def quote(self, value): return isinstance(value, str) and '\"%s\"' %", "> MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item", "map of all messages to be used to pretty print the payloads by", "print \"failed to pretty print the paylod. 
wrong message structure?\" print \"%spayload: %s\"", "if not verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else:", "definition[\"message\"], indent=indent+1) else: value = item if \"enum\" in definition: value = \"%s", "event_list: event_obj = map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID])", "callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return False tag_manager = TagManager()", "self.parse_msg(msg, msgs, {}, enums, []) # ========================= # pretty print message maps #", "list: if msg[MSG_ID] == id: return msg return None def get_enum(self, list, id):", "\" filter = None @staticmethod def set_filter(filter): def create_check(check): def check_default(in_str): return check", "== None: value = \"null\" elif isinstance(item, unicode): if not verbose_debug and len(item)", "**args) return True return False tag_manager = TagManager() class MessageMap(object): \"\"\" to create", "of all messages to be used to pretty print the payloads by adding", "in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item == None:", "message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service, command_name, message_type): print prelude if format: print", "== \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if", "print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP", "in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True if check in \"*\":", "command_name, message_type): print prelude if format: print \" message type:\", message_type print \"", "matches[TEXT] or last_match == OPENING_TAG: ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) 
last_match", "in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list can", "msg[MSG_KEY_TAG] if format_payload and not msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \"", "= [] self.parse_msg(msg, msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID", "len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" %", "in MessageMap:\" print msg def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and service", "'%s: ' % self.quote(name) print '%s%s{' % (indent * INDENT, name) indent +=", "command_def.get(\"name\", None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not", "OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else: last_match = OPENING_TAG ret.extend([LF, indent_count *", "msg_list, parsed_list, raw_enums, field_obj['message']) if (len(field) - 1) >= ENUM_ID and field[ENUM_ID]: name,", "\"optional\", 2: \"repeated\" } if msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME]", "None if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return name or cmd_id def", "try: file = open(filter, 'rb') content = file.read() file.close() except: print \"reading filter", "if \"enum\" in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item", "4 INDENT = \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter", "maps # ======================= def get_msg(self, list, id): MSG_ID = 0 for msg in", "None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: # print msg print \"failed", "* INDENT, m.group()]) last_match = CLOSING_TAG elif matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match", "payload and 
command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception,", "else: print \"handling of message failed in handle_messages in MessageMap:\" print msg def", "specified filter failed\" print \"parsed filter:\", filter_obj if filter_obj: for service in filter_obj:", "self._connection = None self._callback = None def check_map_complete(self, prop): for service in self._service_infos:", "and field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else:", "MSG_KEY_COMMAND_ID = 2 MSG_KEY_FORMAT = 3 MSG_KEY_STATUS = 4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID", "tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_ENUM_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG:", "print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\", msg[MSG_KEY_CLIENT_ID] if", "filter failed\" print \"parsed filter:\", filter_obj if filter_obj: for service in filter_obj: for", "def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service", "def __init__(self): self._counter = 1 self._tags = {} def _get_empty_tag(self): tag = 1", "MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\"]' % service })", "\"message\" in definition: print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1)", "msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST = 2 FIELD_NAME = 0", "= enum[0] return name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME", "last_match == OPENING_TAG: 
ret.append(m.group()) else: ret.extend([LF, indent_count * INDENT, m.group()]) last_match = CLOSING_TAG", "print prelude if format: if in_string.startswith(\"<\"): in_string = re.sub(r\"<\\?[^>]*>\", \"\", in_string) ret =", "else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\" else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\"", "MessageMap.filter[service]: for check in MessageMap.filter[service][message_type]: if check(command): return True return False # ===========================", "= 7 COMMAND_HOST_INFO = 10 COMMAND_MESSAGE_INFO = 11 COMMAND_ENUM_INFO = 12 INDENT =", "MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def handle_enums(self, msg,", "context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages", "or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) else:", "EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj = map[event[NUMBER]] = {}", "def get_msg(self, list, id): MSG_ID = 0 for msg in list: if msg[MSG_ID]", "if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False,", "except: print \"failed evaling message in parse_json\" return payload try: from json import", "= OPENING_TAG ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1 except StopIteration: pass", "to be used to pretty print the payloads by adding the keys to", "handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD])", "None) or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter", 
"file.read() file.close() except: print \"reading filter failed\" try: code = compile(content.replace('\\r\\n', '\\n'), filter,", "service in filter_obj: for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check", "def parse_msg(self, msg, msg_list, parsed_list, raw_enums, ret): NAME = 1 FIELD_LIST = 2", "item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent,", "StopIteration: pass except: raise else: ret = [in_string] in_string = \"\".join(ret).lstrip(LF) print in_string", "'message map:' print '{' for service in self._map: if not self._print_map_services or service", "MSG_KEY_PAYLOAD: '[\"%s\", [], 1]' % service }) def handle_enums(self, msg, service): if not", "name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name'] else: parsed_list[name] = field_obj", "'%s\"message\": <circular reference>,' % ( indent * INDENT) continue else: c_list.append(obj['message_name']) if obj[key]:", "print '%s},' % (indent * INDENT) def pretty_print_message_map(self): print 'message map:' print '{'", "service[0] == \"scope\": versions = map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1]", "FIELD_NUMBER = 2 FIELD_Q = 3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP", "= \"null\" elif isinstance(item, unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH: value", "else: print \" payload:\", msg[MSG_KEY_PAYLOAD], \"\\n\" else: print msg def check_message(service, command, message_type):", "self._map = map self._connection = connection self._callback = callback self._print_map = context.print_message_map self._print_map_services", "get_cmd_name(service, cmd_id): name = None if message_map: name = message_map.get(service, {}).get(int(cmd_id), {}).get(\"name\") return", "'eval') filter_obj = eval(code) 
except: print \"parsing the specified filter failed\" print \"parsed", "in filter_obj[service][type]] ) MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def", "_parse_json(msg): payload = None try: payload = eval(msg.replace(\",null\", \",None\")) except: print \"failed evaling", "FIELD_Q = 3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP = { 0:", "self.get_msg(msgs, command[RESPONSE_ID]) command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1", "= check.strip('*') return check_endswith return check_default content = filter filter_obj = None import", "'%s%s: [' % (indent * INDENT, self.quote(key)) for item in obj[key]: self.pretty_print_object(item, indent", "' ' c_list = [] + c_list if name: name = '%s: '", "if command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND,", "not msg[MSG_KEY_STATUS] and service in self._service_infos: enum_list = parse_json(msg[MSG_KEY_PAYLOAD]) if not enum_list ==", "self.pretty_print_object(self._map[service], 1, [], service) print '}' def quote(self, value): return isinstance(value, str) and", "name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name': name, 'numbers': numbers} ret.append(field_obj) return", "# ========================= def pretty_print_object(self, obj, indent, c_list, name=''): INDENT = ' ' c_list", "\"enum\" in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item ==", "0 for msg in list: if msg[MSG_ID] == id: return msg return None", "in self._services: if not service.startswith('core-') and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False,", "ret.extend([LF, indent_count * INDENT, m.group()]) indent_count += 1 except StopIteration: pass except: raise", "+= 1 
except StopIteration: pass except: raise else: ret = [in_string] in_string =", ">= FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] =", "if filter_obj: for service in filter_obj: for type in filter_obj[service]: filter_obj[service][type] = (", "msg[MSG_KEY_STATUS] and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the", "id) name = enums[1] dict = {} if enums and len(enums) == 3:", "re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]:", "in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages,", "= re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next() matches = m.groups() if", "'rb') content = file.read() file.close() except: print \"reading filter failed\" try: code =", "[] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed", "common import Singleton from maps import status_map, format_type_map, message_type_map, message_map def _parse_json(msg): payload", "self.scope_minor_version = 0 self._service_infos = {} self._map = map self._connection = connection self._callback", "self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\",", "= 0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID = 3 # Command", "tag = tag_manager.set_callback(self.handle_info, {\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, 
MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_INFO, MSG_KEY_FORMAT:", "% (indent * INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent + 1,", "0 TAG = 1 CLOSING_TAG = 2 OPENING_CLOSING_TAG = 3 OPENING_TAG = 4", "pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0 messages\"\"\" LF = \"\\n\" TEXT", "handle_info in MessageMap:\" print msg def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and", "None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'):", "else: c_list.append(obj['message_name']) if obj[key]: print '%s%s: [' % (indent * INDENT, self.quote(key)) for", "return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id): name = None if message_map: name =", "service }) else: print \"handling of message failed in handle_info in MessageMap:\" print", "if format: print \" message type:\", message_type print \" service:\", service print \"", "0 self._service_infos = {} self._map = map self._connection = connection self._callback = callback", "\"handling of message failed in handle_messages in MessageMap:\" print msg def request_infos(self): for", "= Q_MAP[field[FIELD_Q]] if (len(field) - 1) >= FIELD_ID and field[FIELD_ID]: if name in", "msg print \"failed to pretty print the paylod. 
wrong message structure?\" print \"%spayload:", "= map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1:", "indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m = matches_iter.next()", "and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling", "else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False):", "return False tag_manager = TagManager() class MessageMap(object): \"\"\" to create a description map", "print STP/1 messages # =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item", "message failed in handle_messages in MessageMap:\" print msg def request_infos(self): for service in", "None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON,", "MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' })", "= {} command_obj['name'] = command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs,", "and not service.startswith('stp-'): self._service_infos[service] = { 'parsed': False, 'parsed_enums': False, 'raw_infos': None, 'raw_messages':", "\"parsed filter:\", filter_obj if filter_obj: for service in filter_obj: for type in filter_obj[service]:", "definition['name']) for sub_item in item: pretty_print_payload_item( indent + 1, 
definition['name'].replace(\"List\", \"\"), definition, sub_item,", "= command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID:", "verbose_debug=False): for item, definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\"", "item, definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" % (indent", "= \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item == None: value = \"null\"", "parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0 MSG_KEY_SERVICE = 1 MSG_KEY_COMMAND_ID =", "MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" % item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try:", "definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude, msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE]", "or \\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or", "{'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_MESSAGE_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag,", "service print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg:", "c_list, key) for key in keys: if isinstance(obj[key], list): if key == \"message\":", "service:\", service print \" command:\", command_name print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in", "4 MSG_KEY_TAG = 5 MSG_KEY_CLIENT_ID = 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8", "not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] =", 
"self._callback() self._services = None self._service_infos = None self._map = None self._connection = None", "value): return isinstance(value, str) and '\"%s\"' % value or value # =========================== #", "return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True if check in", "self._callback = None def check_map_complete(self, prop): for service in self._service_infos: if not self._service_infos[service][prop]:", "parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message list can be empty (e.g. for", "service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag =", "pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'],", "if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos']", "from json import loads as parse_json except: globals()['parse_json'] = _parse_json MSG_KEY_TYPE = 0", "= parse_json(msg[MSG_KEY_PAYLOAD]) if host_info: for service in host_info[5]: if service[0] == \"scope\": versions", "to pretty print the payloads by adding the keys to all values\"\"\" COMMAND_INFO", "% item[0:MAX_STR_LENGTH] else: value = \"\\\"%s\\\"\" % item try: print \"%s%s: %s\" %", "enums, []) # ========================= # pretty print message maps # ========================= def pretty_print_object(self,", "for key in keys: if not (isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s:", "None: value = \"null\" elif isinstance(item, unicode): if not verbose_debug and len(item) >", "command, message_type): if MessageMap.filter and service in MessageMap.filter and \\ message_type in 
MessageMap.filter[service]:", "= 3 # Command Info COMMAND_LIST = 0 EVENT_LIST = 1 NAME =", "4 ENUM_ID = 5 Q_MAP = { 0: \"required\", 1: \"optional\", 2: \"repeated\"", "= m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or last_match == OPENING_TAG:", "= 2 FIELD_NAME = 0 FIELD_TYPE = 1 FIELD_NUMBER = 2 FIELD_Q =", "indent_count * INDENT, m.group()]) indent_count += 1 except StopIteration: pass except: raise else:", "failed in default_msg_handler in MessageMap:\" print msg # ======================= # create the message", "in self._service_infos: if not self._service_infos[service][prop]: return False return True def default_msg_handler(self, msg): if", "key == \"message\": if obj['message_name'] in c_list: print '%s\"message\": <circular reference>,' % (", "def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print STP 0 messages\"\"\" LF = \"\\n\"", "in item: pretty_print_payload_item( indent + 1, definition['name'].replace(\"List\", \"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item(", "= item if \"enum\" in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item)", "msg def check_message(service, command, message_type): if MessageMap.filter and service in MessageMap.filter and \\", "= 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON = 1 MSG_TYPE_ERROR = 4 INDENT =", "MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag, MSG_KEY_PAYLOAD: '[\"%s\", [], 1, 1]' % service }) else: print", "isinstance(obj[key], dict): self.pretty_print_object(obj[key], indent, c_list, key) for key in keys: if isinstance(obj[key], list):", "filter_obj if filter_obj: for service in filter_obj: for type in filter_obj[service]: filter_obj[service][type] =", "service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: self.request_enums() else:", "for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info, 
{\"service\": service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE:", "info failed\" def request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service})", "self.pretty_print_object(obj[key], indent, c_list, key) for key in keys: if isinstance(obj[key], list): if key", "(%s)\" % (definition['enum']['numbers'][item], item) elif item == None: value = \"null\" elif isinstance(item,", "indent * INDENT, name, value[0:100], '...') def pretty_print_payload(payload, definitions, indent=2, verbose_debug=False): for item,", "(isinstance(obj[key], dict) or isinstance(obj[key], list)): print '%s%s: %s,' % (indent * INDENT, self.quote(key),", "else: self.request_infos() else: print \"getting host info failed\" def request_enums(self): for service in", "filter filter_obj = None import os if os.path.isfile(filter): try: file = open(filter, 'rb')", "\\ '<id: %d>' % msg[MSG_KEY_COMMAND_ID] message_type = message_type_map[msg[MSG_KEY_TYPE]] if not MessageMap.filter or check_message(service,", "filter_obj: for type in filter_obj[service]: filter_obj[service][type] = ( [create_check(check) for check in filter_obj[service][type]]", "}) def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list", "set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args) return tag def", "+= 1 keys = obj.keys() for key in keys: if not (isinstance(obj[key], dict)", "if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of", "% service }) def handle_enums(self, msg, service): if not msg[MSG_KEY_STATUS] and service in", "if payload and command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except", 
"ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2 MSG_TYPE_EVENT", "print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else: value =", "and service in self._service_infos: command_list = parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag", "\"*\": return check_pass if check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'): check", "%s\" % (INDENT, payload) print \"%sdefinition: %s\" % (INDENT, definition) else: print \"", "= 0 self._service_infos = {} self._map = map self._connection = connection self._callback =", "# =========================== # get the messages from scope # =========================== def request_host_info(self): for", "False, 'raw_infos': None, 'raw_messages': None } self._connection.send_command_STP_1({ MSG_KEY_TYPE: MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO,", "self._callback = callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() #", "== None: self._service_infos[service]['raw_enums'] = enum_list and enum_list[0] or [] self._service_infos[service]['parsed_enums'] = True if", "self._tags = {} def _get_empty_tag(self): tag = 1 while True: if not tag", "1) >= ENUM_ID and field[ENUM_ID]: name, numbers = self.get_enum(raw_enums, field[ENUM_ID]) field_obj['enum'] = {'name':", "or value # =========================== # pretty print STP/1 messages # =========================== def pretty_print_payload_item(indent,", "= parsed_list[name]['message_name'] else: parsed_list[name] = field_obj msg = self.get_msg(msg_list, field[FIELD_ID]) field_obj['message_name'] = msg", "type:\", message_type print \" service:\", service 
print \" command:\", command_name print \" format:\",", "if os.path.isfile(filter): try: file = open(filter, 'rb') content = file.read() file.close() except: print", "MSG_ID = 0 map = self._map[service] = {} command_list = self._service_infos[service]['raw_infos'][COMMAND_LIST] msgs =", "for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name': name, 'q':", "print \"%sdefinition: %s\" % (INDENT, definition) else: print \" \", msg[MSG_KEY_PAYLOAD] print \"\\n\"", "check_message(service, command, message_type): if MessageMap.filter and service in MessageMap.filter and \\ message_type in", "def request_enums(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_enums, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE:", "default_msg_handler in MessageMap:\" print msg # ======================= # create the message maps #", "msg: # print msg print \"failed to pretty print the paylod. wrong message", "1 MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton):", "3 FIELD_ID = 4 ENUM_ID = 5 Q_MAP = { 0: \"required\", 1:", "msg[MSG_KEY_TYPE] == MSG_TYPE_ERROR: payload = parse_json(msg[MSG_KEY_PAYLOAD]) print \" payload:\" if payload and command_def:", "command_obj[MSG_TYPE_RESPONSE] = self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST:", "service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name = command_def and command_def.get(\"name\",", "class TagManager(Singleton): def __init__(self): self._counter = 1 self._tags = {} def _get_empty_tag(self): tag", "self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # =========================== # get the messages from", "name = None if message_map: name = message_map.get(service, 
{}).get(int(cmd_id), {}).get(\"name\") return name or", "map(int, service[1].split('.')) self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: self.request_enums()", "if msg[MSG_KEY_TAG] in self._tags: callback, args = self._tags.pop(msg[MSG_KEY_TAG]) callback(msg, **args) return True return", "= callback self._print_map = context.print_message_map self._print_map_services = filter(bool, context.print_message_map_services.split(',')) self._connection.set_msg_handler(self.default_msg_handler) self.request_host_info() # ===========================", "= 11 COMMAND_ENUM_INFO = 12 INDENT = \" \" filter = None @staticmethod", "11 COMMAND_ENUM_INFO = 12 INDENT = \" \" filter = None @staticmethod def", "of message failed in default_msg_handler in MessageMap:\" print msg # ======================= # create", "= 6 MSG_KEY_UUID = 7 MSG_KEY_PAYLOAD = 8 MSG_VALUE_COMMAND = 1 MSG_VALUE_FORMAT_JSON =", "= self.parse_msg(msg, msgs, {}, enums, []) if len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list", "matches[OPENING_CLOSING_TAG] or \"<![CDATA[\" in matches[1]: last_match = OPENING_CLOSING_TAG ret.extend([LF, indent_count * INDENT, m.group()])", "======================= # create the message maps # ======================= def get_msg(self, list, id): MSG_ID", "numbers} ret.append(field_obj) return ret def parse_raw_lists(self, service): MSG_TYPE_COMMAND = 1 MSG_TYPE_RESPONSE = 2", "failed in handle_info in MessageMap:\" print msg def handle_messages(self, msg, service): if not", "msg, format, format_payload, verbose_debug=False): service = msg[MSG_KEY_SERVICE] command_def = message_map.get(service, {}).get(msg[MSG_KEY_COMMAND_ID], None) command_name", "msg def handle_messages(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: message_list", "= { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if (len(field) - 1)", "= self.parse_msg(msg, msgs, {}, enums, 
[]) # ========================= # pretty print message maps", "command_def: definition = command_def.get(msg[MSG_KEY_TYPE], None) try: pretty_print_payload(payload, definition, verbose_debug=verbose_debug) except Exception, msg: #", "+= 1 def set_callback(self, callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args)", "in \"*\": return check_pass if check.endswith('*'): check = check.strip('*') return check_startswith if check.startswith('*'):", "check_endswith return check_default content = filter filter_obj = None import os if os.path.isfile(filter):", "= command[NAME] msg = self.get_msg(msgs, command[MESSAGE_ID]) command_obj[MSG_TYPE_COMMAND] = self.parse_msg(msg, msgs, {}, enums, [])", "callback, args={}): tag = self._get_empty_tag() self._tags[tag] = (callback, args) return tag def handle_message(self,", "[], 1, 1]' % service }) else: print \"handling of message failed in", "check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True if check in \"*\": return check_pass", "parse_json(msg[MSG_KEY_PAYLOAD]) if command_list: self._service_infos[service]['raw_infos'] = command_list tag = tag_manager.set_callback(self.handle_messages, {'service': service}) self._connection.send_command_STP_1({ MSG_KEY_TYPE:", "\"%s (%s)\" % (definition['enum']['numbers'][item], item) elif item == None: value = \"null\" elif", "= eval(code) except: print \"parsing the specified filter failed\" print \"parsed filter:\", filter_obj", "* INDENT, name, value) except: print \"%s%s: %s%s\" % ( indent * INDENT,", "messages # =========================== def pretty_print_payload_item(indent, name, definition, item, verbose_debug=False): if item and \"message\"", "if (len(field) - 1) >= FIELD_Q and field[FIELD_Q]: field_obj['q'] = Q_MAP[field[FIELD_Q]] if (len(field)", "print \"%s%s: %s\" % ( indent * INDENT, name, value) except: print \"%s%s:", "print \"handling of message failed in default_msg_handler in MessageMap:\" print msg # 
=======================", "EVENT_LIST = 1 NAME = 0 NUMBER = 1 MESSAGE_ID = 2 RESPONSE_ID", "id): MSG_ID = 0 for msg in list: if msg[MSG_ID] == id: return", "of message failed in handle_messages in MessageMap:\" print msg def finalize(self): if self._print_map:", "{} if enums and len(enums) == 3: for enum in enums[2]: dict[enum[1]] =", "= matches_iter.next() matches = m.groups() if matches[CLOSING_TAG]: indent_count -= 1 if matches[TEXT] or", "pretty print STP/0 messages # =========================== def pretty_print_XML(prelude, in_string, format): \"\"\"To pretty print", "if msg: for field in msg[FIELD_LIST]: name = field[FIELD_NAME] field_obj = { 'name':", "len(self._service_infos[service]['raw_infos']) - 1 >= EVENT_LIST: event_list = self._service_infos[service]['raw_infos'][EVENT_LIST] for event in event_list: event_obj", ") MessageMap.filter = filter_obj @staticmethod def has_map(): return bool(message_map) @staticmethod def get_cmd_name(service, cmd_id):", "def handle_info(self, msg, service): if not msg[MSG_KEY_STATUS] and service in self._service_infos: command_list =", "print 'message map:' print '{' for service in self._map: if not self._print_map_services or", "in MessageMap:\" print msg def request_infos(self): for service in self._service_infos: tag = tag_manager.set_callback(self.handle_info,", "\"\"), definition, sub_item, verbose_debug=verbose_debug) else: pretty_print_payload_item( indent, definition['name'], definition, item, verbose_debug=verbose_debug) def pretty_print(prelude,", "print \" format:\", format_type_map[msg[MSG_KEY_FORMAT]] if MSG_KEY_STATUS in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if", "self.check_map_complete('parsed_enums'): self.request_infos() else: print \"handling of message failed in handle_messages in MessageMap:\" print", "MSG_TYPE_ERROR = 4 INDENT = \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def", "def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return 
in_str.startswith(check) def check_pass(in_str): return True if", "in_str def check_endswith(in_str): return in_str.endswith(check) def check_startswith(in_str): return in_str.startswith(check) def check_pass(in_str): return True", "return True def default_msg_handler(self, msg): if not tag_manager.handle_message(msg): print \"handling of message failed", "FIELD_ID and field[FIELD_ID]: if name in parsed_list: field_obj['message'] = parsed_list[name]['message'] field_obj['message_name'] = parsed_list[name]['message_name']", "len(enums) == 3: for enum in enums[2]: dict[enum[1]] = enum[0] return name, dict", "= map[event[NUMBER]] = {} event_obj['name'] = event[NAME] msg = self.get_msg(msgs, event[MESSAGE_ID]) event_obj[MSG_TYPE_EVENT] =", "print \"%s%s:\" % (indent * INDENT, definition['name']) for sub_item in item: pretty_print_payload_item( indent", "= [] indent_count = 0 matches_iter = re.finditer(r\"([^<]*)(<(\\/)?[^>/]*(\\/)?>)\", in_string) try: while True: m", "in definition: print \"%s%s:\" % (indent * INDENT, name) pretty_print_payload(item, definition[\"message\"], indent=indent+1) else:", "versions[1] if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos() else: print \"getting host info", "\"parsing the specified filter failed\" print \"parsed filter:\", filter_obj if filter_obj: for service", "% ( indent * INDENT, name, value) except: print \"%s%s: %s%s\" % (", "cmd_id def __init__(self, services, connection, callback, context, map=message_map): self._services = services self.scope_major_version =", "= field[FIELD_NAME] field_obj = { 'name': name, 'q': 'required', 'type': field[FIELD_TYPE], } if", "and service in self._service_infos: message_list = parse_json(msg[MSG_KEY_PAYLOAD]) self._service_infos[service]['raw_messages'] = message_list # the message", "in msg: print \" status:\", status_map[msg[MSG_KEY_STATUS]] if MSG_KEY_CLIENT_ID in msg: print \" cid:\",", "messages from scope # =========================== def 
request_host_info(self): for service in self._services: if not", "for item, definition in zip(payload, definitions): if definition[\"q\"] == \"repeated\": print \"%s%s:\" %", "isinstance(item, unicode): if not verbose_debug and len(item) > MAX_STR_LENGTH: value = \"\\\"%s...\\\"\" %", "FIELD_ID = 4 ENUM_ID = 5 Q_MAP = { 0: \"required\", 1: \"optional\",", "enums[2]: dict[enum[1]] = enum[0] return name, dict def parse_msg(self, msg, msg_list, parsed_list, raw_enums,", "for the 'core' service) if message_list: self.parse_raw_lists(service) self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize()", "self.scope_major_version = versions[0] self.scope_minor_version = versions[1] if self.scope_minor_version >= 1: self.request_enums() else: self.request_infos()", "MSG_VALUE_COMMAND, MSG_KEY_SERVICE: \"scope\", MSG_KEY_COMMAND_ID: self.COMMAND_HOST_INFO, MSG_KEY_FORMAT: MSG_VALUE_FORMAT_JSON, MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def", "item if \"enum\" in definition: value = \"%s (%s)\" % (definition['enum']['numbers'][item], item) elif", "def get_enum(self, list, id): enums = self.get_msg(list, id) name = enums[1] dict =", "MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info = parse_json(msg[MSG_KEY_PAYLOAD]) if", "list, id): enums = self.get_msg(list, id) name = enums[1] dict = {} if", "self._service_infos[service]['parsed'] = True if self.check_map_complete('parsed'): self.finalize() else: print \"handling of message failed in", "INDENT = \" \" MAX_STR_LENGTH = 50 class TagManager(Singleton): def __init__(self): self._counter =", "MSG_KEY_TAG: tag_manager.set_callback(self.handle_host_info), MSG_KEY_PAYLOAD: '[]' }) def handle_host_info(self, msg): if not msg[MSG_KEY_STATUS]: host_info =", "None def check_map_complete(self, prop): for service in self._service_infos: if not self._service_infos[service][prop]: return False", "= 0 
EVENT_LIST = 1 NAME = 0 NUMBER = 1 MESSAGE_ID =" ]
[ "math import sys import csv #Configuración de la conexión a Mysql try: cnx", "(var1,var2)) except mysql.connector.errors.DataError as err: print(\"Artists var 1: \"+ var1+ \" \") print(\"Artists", "(`music_track_id`, `music_track_name`) VALUES (%s, %s)\" sql_artists = \"INSERT INTO `artists` (`music_artist_id`, `music_artist_name`) VALUES", "the query var1= None if isNaN(df_artist['musicbrainz-artist-id'][i]) else ''.join([c for c in df_artist['musicbrainz-artist-id'][i].strip() if", "\") sys.exit(1) # the connection is not autocommited by default. So we must", "columns_data = ['userId','timestamp','musicbrainz-artist-id', 'artist-name','trackId','trackname'] #df_use_habits= pd.DataFrame(columns = ['userId','timestamp','musicbrainz-artist-id', 'artist-name','trackId','trackname']) df_artist= pd.DataFrame(columns = ['musicbrainz-artist-id',", "var 1: \"+ var1+ \" \") print(\"Track var 2: \"+ var2+ \" \")", "(`music_artist_id`, `music_artist_name`) VALUES (%s, %s)\" def isNaN(string): return string != string for i", "return string != string for i in df_tracks.index: # Execute the query var1=", "(%s, %s)\" def isNaN(string): return string != string for i in df_tracks.index: #", "2: \"+ var2+ \" \") print(\"nooooo\" + df_tracks['trackname'][i]) sys.exit(1) # the connection is", "df_artist.index: # Execute the query var1= None if isNaN(df_artist['musicbrainz-artist-id'][i]) else ''.join([c for c", "var1= None if isNaN(df_artist['musicbrainz-artist-id'][i]) else ''.join([c for c in df_artist['musicbrainz-artist-id'][i].strip() if c not", "not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) var2= None if isNaN(df_artist['artist-name'][i])", "['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) #print(var2) try: cursor.execute(sql_tracks, (var1,var2)) except mysql.connector.errors.DataError", "= ['musicbrainz-artist-id', 'artist-name']) df_tracks= 
pd.DataFrame(columns = ['trackId','trackname'] ) chunksize = 10 ** 6", "errorcode.ER_BAD_DB_ERROR: print(\"Database does not exist\") else: print(err) cursor = cnx.cursor() #Lectura del dataframe", "c in df_artist['musicbrainz-artist-id'][i].strip() if c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027',", "mysql.connector from mysql.connector import errorcode import math import sys import csv #Configuración de", "VALUES (%s, %s)\" def isNaN(string): return string != string for i in df_tracks.index:", "record sql_tracks = \"INSERT INTO `tracks` (`music_track_id`, `music_track_name`) VALUES (%s, %s)\" sql_artists =", "= ['trackId','trackname'] ) chunksize = 10 ** 6 #with pd.read_csv('data/userid-timestamp-artid-artname-traid-traname.tsv', encoding=\"utf-8\", delimiter='\\r', chunksize=chunksize,", "= \"INSERT INTO `artists` (`music_artist_id`, `music_artist_name`) VALUES (%s, %s)\" def isNaN(string): return string", "for chunk in reader: df_artist = df_artist.append(chunk[['musicbrainz-artist-id', 'artist-name']]) df_tracks = df_tracks.append(chunk[['trackId','trackname']]) #print(df_artist) print(\"Finish", "isNaN(string): return string != string for i in df_tracks.index: # Execute the query", "isNaN(df_tracks['trackId'][i]) else ''.join([c for c in df_tracks['trackId'][i].strip() if c not in ['\\t', '\\n',", "import math import sys import csv #Configuración de la conexión a Mysql try:", "not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) var2= None if isNaN(df_tracks['trackname'][i])", "string for i in df_tracks.index: # Execute the query var1= None if isNaN(df_tracks['trackId'][i])", "i in df_tracks.index: # Execute the query var1= None if isNaN(df_tracks['trackId'][i]) else ''.join([c", "the connection is not autocommited by default. 
So we must commit to save", "from mysql.connector import errorcode import math import sys import csv #Configuración de la", "as err: print(\"Track var 1: \"+ var1+ \" \") print(\"Track var 2: \"+", "df_tracks = df_tracks.reset_index(drop=True) # Create a new record sql_tracks = \"INSERT INTO `tracks`", "\"+ var2+ \" \") print(\"nooooo\" + df_tracks['trackname'][i]) sys.exit(1) # the connection is not", "10 ** 6 #with pd.read_csv('data/userid-timestamp-artid-artname-traid-traname.tsv', encoding=\"utf-8\", delimiter='\\r', chunksize=chunksize, header=None) as reader: with pd.read_csv('data/clean.tsv',", "+ df_tracks['trackname'][i]) sys.exit(1) # the connection is not autocommited by default. So we", "password='<PASSWORD>.', host='127.0.0.1', database='taller1') except mysql.connector.Error as err: if err.errno == errorcode.ER_ACCESS_DENIED_ERROR: print(\"Something is", "import pandas as pd import mysql.connector from mysql.connector import errorcode import math import", "string != string for i in df_tracks.index: # Execute the query var1= None", "in df_tracks['trackname'][i].strip() if c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']])", "\"INSERT INTO `artists` (`music_artist_id`, `music_artist_name`) VALUES (%s, %s)\" def isNaN(string): return string !=", "#print(var2) try: cursor.execute(sql_tracks, (var1,var2)) except mysql.connector.errors.DataError as err: print(\"Track var 1: \"+ var1+", "your user name or password\") elif err.errno == errorcode.ER_BAD_DB_ERROR: print(\"Database does not exist\")", "# Create a new record sql_tracks = \"INSERT INTO `tracks` (`music_track_id`, `music_track_name`) VALUES", "print(\"Artists var 2: \"+ var2+ \" \") sys.exit(1) # the connection is not", "import csv #Configuración de la conexión a Mysql try: cnx = mysql.connector.connect(user='user_taller1', password='<PASSWORD>.',", "chunksize=chunksize, header=None, names=columns_data) as reader: for chunk in 
reader: df_artist = df_artist.append(chunk[['musicbrainz-artist-id', 'artist-name']])", "df_tracks['trackId'][i].strip() if c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) var2=", "i in df_artist.index: # Execute the query var1= None if isNaN(df_artist['musicbrainz-artist-id'][i]) else ''.join([c", "c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) var2= None if", "'\\u005C', '\\u0027', '\"']]) var2= None if isNaN(df_tracks['trackname'][i]) else ''.join([c for c in df_tracks['trackname'][i].strip()", "for c in df_artist['musicbrainz-artist-id'][i].strip() if c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C',", "as err: print(\"Artists var 1: \"+ var1+ \" \") print(\"Artists var 2: \"+", "autocommited by default. So we must commit to save our changes. cnx.commit() for", "['userId','timestamp','musicbrainz-artist-id', 'artist-name','trackId','trackname']) df_artist= pd.DataFrame(columns = ['musicbrainz-artist-id', 'artist-name']) df_tracks= pd.DataFrame(columns = ['trackId','trackname'] ) chunksize", "if c not in ['\\t', '\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) #print(var2) try:", "2: \"+ var2+ \" \") sys.exit(1) # the connection is not autocommited by", ") chunksize = 10 ** 6 #with pd.read_csv('data/userid-timestamp-artid-artname-traid-traname.tsv', encoding=\"utf-8\", delimiter='\\r', chunksize=chunksize, header=None) as", "if err.errno == errorcode.ER_ACCESS_DENIED_ERROR: print(\"Something is wrong with your user name or password\")", "'\\n', '\\f', '\\r','\\u000B','\\u0085','\\u2028','\\u2029','\\u0022', '\\u005C', '\\u0027', '\"']]) var2= None if isNaN(df_tracks['trackname'][i]) else ''.join([c for", "is wrong with your user name or password\") elif err.errno == errorcode.ER_BAD_DB_ERROR: print(\"Database", "reader: with 
"""Bulk-load artists and tracks from a cleaned TSV dump into MySQL.

Reads ``data/clean.tsv`` in chunks, deduplicates the artist and track
columns, and inserts the results into the ``tracks`` and ``artists``
tables of the ``taller1`` database.  Run as a script.
"""
import pandas as pd
import mysql.connector
from mysql.connector import errorcode
import math
import sys
import csv

# Characters stripped from every value before insertion: whitespace
# controls plus quote/backslash characters that would break the rows.
_FORBIDDEN_CHARS = frozenset(['\t', '\n', '\f', '\r', '\u000B', '\u0085',
                              '\u2028', '\u2029', '\u0022', '\u005C',
                              '\u0027', '"'])

# Parameterized INSERT statements -- values are bound by the driver.
sql_tracks = "INSERT INTO `tracks` (`music_track_id`, `music_track_name`) VALUES (%s, %s)"
sql_artists = "INSERT INTO `artists` (`music_artist_id`, `music_artist_name`) VALUES (%s, %s)"


def isNaN(string):
    """Return True when *string* is NaN (NaN is the only value != itself)."""
    return string != string


def _clean(value):
    """Return *value* stripped of surrounding whitespace and forbidden
    characters, or None when the cell is NaN.

    Replaces the four copies of the inline strip/filter expression in the
    original insert loops.
    """
    if isNaN(value):
        return None
    return ''.join(c for c in value.strip() if c not in _FORBIDDEN_CHARS)


def _connect():
    """Open the MySQL connection, exiting with a diagnostic on failure.

    The original fell through after printing the error and then crashed
    with a NameError on the undefined connection object; exiting here
    makes the failure explicit.
    """
    try:
        return mysql.connector.connect(user='user_taller1',
                                       password='<PASSWORD>.',
                                       host='127.0.0.1',
                                       database='taller1')
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
        sys.exit(1)


def _load_frames():
    """Read data/clean.tsv in chunks; return deduplicated (artists, tracks).

    Returns:
        tuple[pandas.DataFrame, pandas.DataFrame]: ``(df_artist, df_tracks)``
        with duplicates dropped and a fresh RangeIndex.
    """
    columns_data = ['userId', 'timestamp', 'musicbrainz-artist-id',
                    'artist-name', 'trackId', 'trackname']
    chunksize = 10 ** 6
    artist_parts = []
    track_parts = []
    with pd.read_csv('data/clean.tsv', encoding="utf-8", delimiter='\t',
                     chunksize=chunksize, header=None,
                     names=columns_data) as reader:
        for chunk in reader:
            artist_parts.append(chunk[['musicbrainz-artist-id', 'artist-name']])
            track_parts.append(chunk[['trackId', 'trackname']])
    print("Finish reading file and dtaframes")
    # Single pd.concat instead of DataFrame.append per chunk: append
    # copied the accumulated frame on every iteration (quadratic) and
    # was removed entirely in pandas 2.0.  Guard the empty-file case,
    # where concat([]) would raise.
    if artist_parts:
        df_artist = pd.concat(artist_parts, ignore_index=True)
        df_tracks = pd.concat(track_parts, ignore_index=True)
    else:
        df_artist = pd.DataFrame(columns=['musicbrainz-artist-id', 'artist-name'])
        df_tracks = pd.DataFrame(columns=['trackId', 'trackname'])
    df_artist = df_artist.drop_duplicates(keep='first').reset_index(drop=True)
    df_tracks = df_tracks.drop_duplicates(keep='first').reset_index(drop=True)
    return df_artist, df_tracks


def main():
    """Entry point: read the TSV, insert tracks, then artists, then close."""
    cnx = _connect()
    cursor = cnx.cursor()
    df_artist, df_tracks = _load_frames()

    for i in df_tracks.index:
        var1 = _clean(df_tracks['trackId'][i])
        var2 = _clean(df_tracks['trackname'][i])
        try:
            cursor.execute(sql_tracks, (var1, var2))
        except mysql.connector.errors.DataError:
            # f-strings / str() instead of "+" so a None value is
            # printed instead of raising TypeError inside the handler.
            print(f"Track var 1: {var1} ")
            print(f"Track var 2: {var2} ")
            print("nooooo" + str(df_tracks['trackname'][i]))
            sys.exit(1)
    # the connection is not autocommitted by default, so we must commit
    # to save our changes
    cnx.commit()

    for i in df_artist.index:
        var1 = _clean(df_artist['musicbrainz-artist-id'][i])
        var2 = _clean(df_artist['artist-name'][i])
        try:
            cursor.execute(sql_artists, (var1, var2))
        except mysql.connector.errors.DataError:
            print(f"Artists var 1: {var1} ")
            print(f"Artists var 2: {var2} ")
            sys.exit(1)
    cnx.commit()

    cursor.close()
    cnx.close()


if __name__ == '__main__':
    main()
[ "\"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object ): ALL_LANES", "( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class", "code def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code", "= Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include", "<unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\")", "Const ( object ): ALL_LANES = \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT", "code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\")", "= \"agg_ht\" class Const ( object ): ALL_LANES = \"ALL_LANES\" class Krnl (", "= \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH =", "HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI", "\"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\"", "getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type", "def getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include", "Fct ( object ): HASH_BUILD_UNIQUE = 
\"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\"", "functions class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI", "= \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI =", "code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code = Code() code.add(\"#include", "<list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\")", "import Code # includes def getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include", "\\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\")", "\"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT", "object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI =", "INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class Fct", "(): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include", "code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include", "= \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions 
class Fct (", "): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const (", "): ALL_LANES = \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY", "= \"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object ): ALL_LANES = \"ALL_LANES\"", "= \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object ):", "\"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET = \"hashAggregateGetBucket\" HASH_AGG_CHECK = \"hashAggregateFindBucket\"", "object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const", "\"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object ): ALL_LANES = \"ALL_LANES\" class", "dogqc.code import Code # includes def getIncludes (): code = Code() code.add(\"#include <list>\")", "\"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET = \"hashAggregateGetBucket\"", "code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes ():", "code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object", "return code def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return", "Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\"", "code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code =", "MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object", 
"= \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET = \"hashAggregateGetBucket\" HASH_AGG_CHECK =", "code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT", "# includes def getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include", "class Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT =", "code class Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT", "from dogqc.code import Code # includes def getIncludes (): code = Code() code.add(\"#include", "<vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\")", "# functions class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\"", "Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\"", "\\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\")", "HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET", "INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class Fct ( object ):", "= \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\"", "): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\"", "object ): ALL_LANES = \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT = \"initAggHT\"", "<float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") 
code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code", "Code # includes def getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\")", "= Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ):", "HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET = \"hashAggregateGetBucket\" HASH_AGG_CHECK", "\"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class", "code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include", "\"agg_ht\" class Const ( object ): ALL_LANES = \"ALL_LANES\" class Krnl ( object", "Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ): MULTI_HT", "code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include", "code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ): MULTI_HT =", "code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return", "object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = 
\"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT =", "\\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ): MULTI_HT = \"multi_ht\"", "): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\"", "<iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\")", "class Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\" AGG_HT =", "HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH", "class Const ( object ): ALL_LANES = \"ALL_LANES\" class Krnl ( object ):", "(): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class Type (", "<limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes", "\\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code = Code()", "\"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class Fct ( object ): HASH_BUILD_UNIQUE =", "INIT_MULTI_HT = \"initMultiHT\" # functions class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\"", "\"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class Fct ( object", "\"hashBuildUnique\" HASH_PROBE_UNIQUE = 
\"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\"", "ALL_LANES = \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY =", "( object ): INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT", "AGG_HT = \"agg_ht\" class Const ( object ): ALL_LANES = \"ALL_LANES\" class Krnl", "= \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions", "= \"initMultiHT\" # functions class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE", "( object ): ALL_LANES = \"ALL_LANES\" class Krnl ( object ): INIT_AGG_HT =", "class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI =", "( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE = \"hashProbeUnique\" HASH_COUNT_MULTI = \"hashCountMulti\" HASH_INSERT_MULTI", "getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\")", "code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include", "def getCudaIncludes (): code = Code() code.add(\"#include \\\"../dogqc/include/util.cuh\\\"\") code.add(\"#include \\\"../dogqc/include/hashing.cuh\\\"\") return code class", "INIT_AGG_HT = \"initAggHT\" INIT_ARRAY = \"initArray\" INIT_UNIQUE_HT = \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" #", "includes def getIncludes (): code = Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\")", "return code class Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT = \"unique_ht\"", "code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") 
code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include", "UNIQUE_HT = \"unique_ht\" AGG_HT = \"agg_ht\" class Const ( object ): ALL_LANES =", "= \"hashCountMulti\" HASH_INSERT_MULTI = \"hashInsertMulti\" HASH_PROBE_MULTI = \"hashProbeMulti\" HASH = \"hash\" HASH_AGG_BUCKET =", "code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code def", "\\\"../dogqc/include/hashing.cuh\\\"\") return code class Type ( object ): MULTI_HT = \"multi_ht\" UNIQUE_HT =", "\"initMultiHT\" # functions class Fct ( object ): HASH_BUILD_UNIQUE = \"hashBuildUnique\" HASH_PROBE_UNIQUE =", "Code() code.add(\"#include <list>\") code.add(\"#include <unordered_map>\") code.add(\"#include <vector>\") code.add(\"#include <iostream>\") code.add(\"#include <ctime>\") code.add(\"#include <limits.h>\")", "= \"initUniqueHT\" INIT_MULTI_HT = \"initMultiHT\" # functions class Fct ( object ): HASH_BUILD_UNIQUE", "<ctime>\") code.add(\"#include <limits.h>\") code.add(\"#include <float.h>\") code.add(\"#include \\\"../dogqc/include/csv.h\\\"\") code.add(\"#include \\\"../dogqc/include/util.h\\\"\") code.add(\"#include \\\"../dogqc/include/mappedmalloc.h\\\"\") return code" ]
[ "stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to data file server. :return:", "import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True)", "import requests as rqc from flask.ext.restful import Resource from flask import Response, request", "Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self):", "data file server. :return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(),", "Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to data file server.", "# -*- coding: utf-8 -*- import logging import requests as rqc from flask.ext.restful", "= rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input", "flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION,", "'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type']", "stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return", "request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req", "\"\"\" Stream input to data file server. 
:return: \"\"\" logging.debug('UPLOAD POST') req =", "INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response(", "def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self):", "class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] )", "utf-8 -*- import logging import requests as rqc from flask.ext.restful import Resource from", ") def post(self): \"\"\" Stream input to data file server. :return: \"\"\" logging.debug('UPLOAD", "coding: utf-8 -*- import logging import requests as rqc from flask.ext.restful import Resource", ":return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True) return Response(", "flask.ext.restful import Resource from flask import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION", "input to data file server. :return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION +", "POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] )", "logging import requests as rqc from flask.ext.restful import Resource from flask import Response,", "file server. 
:return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True)", "return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to data file", "as rqc from flask.ext.restful import Resource from flask import Response, request from flask", "= 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()),", "flask import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource):", "requests as rqc from flask.ext.restful import Resource from flask import Response, request from", "from flask import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class", "Stream input to data file server. :return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION", "UploadResource(Resource): def get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def", "to data file server. :return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data',", "logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type']", "content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to data file server. 
:return: \"\"\"", "stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to data", "req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream", "-*- import logging import requests as rqc from flask.ext.restful import Resource from flask", "get(self): req = rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\"", "from flask.ext.restful import Resource from flask import Response, request from flask import stream_with_context", "post(self): \"\"\" Stream input to data file server. :return: \"\"\" logging.debug('UPLOAD POST') req", "import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def", "from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/' class UploadResource(Resource): def get(self): req =", "rqc from flask.ext.restful import Resource from flask import Response, request from flask import", "Resource from flask import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION = 'http://dataserver:3000/'", "import logging import requests as rqc from flask.ext.restful import Resource from flask import", "rqc.get(INPUT_DATA_SERVER_LOCATION, stream=True) return Response( stream_with_context(req.iter_content()), content_type=req.headers['content-type'] ) def post(self): \"\"\" Stream input to", "\"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True) return Response( stream_with_context(req.iter_content()),", "server. 
:return: \"\"\" logging.debug('UPLOAD POST') req = rqc.post(INPUT_DATA_SERVER_LOCATION + 'data', json=request.stream.read(), stream=True) return", "-*- coding: utf-8 -*- import logging import requests as rqc from flask.ext.restful import", "import Resource from flask import Response, request from flask import stream_with_context INPUT_DATA_SERVER_LOCATION =", "def post(self): \"\"\" Stream input to data file server. :return: \"\"\" logging.debug('UPLOAD POST')" ]
[ "good introduction for a college essay</a> boston university college essay <a href=\" https://collegeessays.us/", "\"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\", \"us\", \"write\", \"www\",", "TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how", "from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays", "- <a href=\" https://collegeessays.us/ \">how to write a good introduction for a college", "scholarship essays - <a href=\" https://collegeessays.us/ \">how to write a good introduction for", "a good introduction for a college essay</a> boston university college essay <a href=\"", "<a href=\" https://collegeessays.us/ \">how to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual(", "SimpleTestCase from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship", "boston university college essay <a href=\" https://collegeessays.us/ \">how to write an essay for", "\"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how to write a good introduction", "fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays -", "\">how to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\",", "write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", 
\"an\",", "\"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\", \"us\", \"write\", \"www\", },", "import SimpleTestCase from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college", "college essay</a> boston university college essay <a href=\" https://collegeessays.us/ \">how to write an", "essay <a href=\" https://collegeessays.us/ \">how to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\"", "django.test import SimpleTestCase from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text =", "\"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\",", "\"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\",", "split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\",", "\"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\", \"us\", \"write\",", "essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\",", "\"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\",", "test_split_words(self): text = \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how to write", "from django.test import SimpleTestCase from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def 
test_split_words(self): text", "\"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\",", "https://collegeessays.us/ \">how to write a good introduction for a college essay</a> boston university", "\"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\",", "href=\" https://collegeessays.us/ \">how to write a good introduction for a college essay</a> boston", "import split_words class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays - <a", "<filename>fluent_comments/tests/test_utils.py from django.test import SimpleTestCase from fluent_comments.utils import split_words class TestUtils(SimpleTestCase): def test_split_words(self):", "text = \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how to write a", "\">how to write a good introduction for a college essay</a> boston university college", "for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\",", "\"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\",", "introduction for a college essay</a> boston university college essay <a href=\" https://collegeessays.us/ \">how", "href=\" https://collegeessays.us/ \">how to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text),", "http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", 
\"d\", \"essay\",", "\"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\",", "\"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\",", "https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\",", "https://collegeessays.us/ \">how to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), {", "essay</a> boston university college essay <a href=\" https://collegeessays.us/ \">how to write an essay", "= \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how to write a good", "\"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\",", "def test_split_words(self): text = \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/ \">how to", "an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\",", "essays - <a href=\" https://collegeessays.us/ \">how to write a good introduction for a", "{ \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\", \"for\", \"good\",", "\"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\", \"us\", \"write\", \"www\", }, )", "college essay <a href=\" https://collegeessays.us/ \">how to write an essay for college</a> https://collegeessays.us/", "split_words class 
TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays - <a href=\"", "for a college essay</a> boston university college essay <a href=\" https://collegeessays.us/ \">how to", "to write a good introduction for a college essay</a> boston university college essay", "write a good introduction for a college essay</a> boston university college essay <a", "\"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\",", "<a href=\" https://collegeessays.us/ \">how to write a good introduction for a college essay</a>", "to write an essay for college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\",", "self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\", \"d\", \"essay\", \"essays\",", "\"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\",", "\"d\", \"essay\", \"essays\", \"for\", \"good\", \"how\", \"href\", \"http\", \"https\", \"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\",", "college</a> https://collegeessays.us/ http://www.monkeyface.com/__media__/js/netsoltrademark.php?d=collegeessays.us\"\"\" self.assertEqual( split_words(text), { \"__media__\", \"a\", \"an\", \"boston\", \"college\", \"collegeessays\", \"com\",", "class TestUtils(SimpleTestCase): def test_split_words(self): text = \"\"\"college scholarship essays - <a href=\" https://collegeessays.us/", "university college essay <a href=\" https://collegeessays.us/ \">how to write an essay for college</a>", "a college essay</a> boston university college essay <a href=\" https://collegeessays.us/ \">how to write", "\"how\", \"href\", \"http\", \"https\", 
\"introduction\", \"js\", \"monkeyface\", \"netsoltrademark\", \"php\", \"scholarship\", \"to\", \"university\", \"us\"," ]
[ "path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) if", "want the tweets in process - A Boolean to specify pre-processing tweets. Output->", "df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str)", "__name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args", "os.mkdir(path) df_dict = get_data_for_lang(language) if process == True: for file in os.listdir(path): path", "if process == True: for file in os.listdir(path): path = os.path.join(\"path\", file) df", "get_data_for_lang from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as pd import argparse", "pre-processing tweets. Output-> x - a dataframe containing extracted data. \"\"\" directory =", "os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',',", "sep=',', index=False) if __name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process',", "import pandas as pd import argparse from distutils.sysconfig import get_python_lib def store_data(language, process", "a striing specifying the language you want the tweets in process - A", "+ \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict =", "language - a striing specifying the language you want the tweets in process", "os import pandas as pd import argparse from distutils.sysconfig import get_python_lib def store_data(language,", "= pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser = 
argparse.ArgumentParser() my_parser.add_argument('--lang',", "x - a dataframe containing extracted data. \"\"\" directory = \"datasets\" parent_dir =", "index=False) if __name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store',", "the language specified. Can create processed datasets if process flag is set as", "df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\":", "file for the language specified. Can create processed datasets if process flag is", "set as True Input-> language - a striing specifying the language you want", "create processed datasets if process flag is set as True Input-> language -", "\"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) ==", "import get_python_lib def store_data(language, process = False): \"\"\" Cretaes a .csv file for", "in os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path,", "df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser = argparse.ArgumentParser()", "argparse from distutils.sysconfig import get_python_lib def store_data(language, process = False): \"\"\" Cretaes a", "A Boolean to specify pre-processing tweets. Output-> x - a dataframe containing extracted", "extracted data. \"\"\" directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path =", "specifying the language you want the tweets in process - A Boolean to", "== True: for file in os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path)", "for the language specified. 
Can create processed datasets if process flag is set", "argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process ==", "- a striing specifying the language you want the tweets in process -", "process - A Boolean to specify pre-processing tweets. Output-> x - a dataframe", "TLA.Data.get_tweets import get_data_for_lang from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as pd", "= \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path)", "flag is set as True Input-> language - a striing specifying the language", "file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ ==", "language specified. Can create processed datasets if process flag is set as True", "get_data_for_lang(language) if process == True: for file in os.listdir(path): path = os.path.join(\"path\", file)", "Output-> x - a dataframe containing extracted data. \"\"\" directory = \"datasets\" parent_dir", "TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as pd import argparse from distutils.sysconfig", "data. \"\"\" directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir,", "action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process == None: store_data(args.lang)", "process = False): \"\"\" Cretaes a .csv file for the language specified. Can", "dataframe containing extracted data. \"\"\" directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\"", "== False: os.mkdir(path) df_dict = get_data_for_lang(language) if process == True: for file in", "a dataframe containing extracted data. 
\"\"\" directory = \"datasets\" parent_dir = get_python_lib() +", "if process flag is set as True Input-> language - a striing specifying", "is set as True Input-> language - a striing specifying the language you", "\"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args()", "file in os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df)", "== \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args =", "datasets if process flag is set as True Input-> language - a striing", "distutils.sysconfig import get_python_lib def store_data(language, process = False): \"\"\" Cretaes a .csv file", "the language you want the tweets in process - A Boolean to specify", "True: for file in os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed", "def store_data(language, process = False): \"\"\" Cretaes a .csv file for the language", "\"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language)", "to specify pre-processing tweets. Output-> x - a dataframe containing extracted data. \"\"\"", "= os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if", "from distutils.sysconfig import get_python_lib def store_data(language, process = False): \"\"\" Cretaes a .csv", "pandas as pd import argparse from distutils.sysconfig import get_python_lib def store_data(language, process =", "specify pre-processing tweets. Output-> x - a dataframe containing extracted data. 
\"\"\" directory", "= pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser", "my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if", "Boolean to specify pre-processing tweets. Output-> x - a dataframe containing extracted data.", "from TLA.Data.get_tweets import get_data_for_lang from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as", "store_data(language, process = False): \"\"\" Cretaes a .csv file for the language specified.", "specified. Can create processed datasets if process flag is set as True Input->", "True Input-> language - a striing specifying the language you want the tweets", "Cretaes a .csv file for the language specified. Can create processed datasets if", "import pre_process_tweet import os import pandas as pd import argparse from distutils.sysconfig import", "as True Input-> language - a striing specifying the language you want the", "\"\"\" directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory)", "= argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process", "directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) if process == True:", "pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser =", "type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process == None: store_data(args.lang) else:", "= os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) 
if process", "tweets. Output-> x - a dataframe containing extracted data. \"\"\" directory = \"datasets\"", "path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False)", "the tweets in process - A Boolean to specify pre-processing tweets. Output-> x", "False: os.mkdir(path) df_dict = get_data_for_lang(language) if process == True: for file in os.listdir(path):", "\"\"\" Cretaes a .csv file for the language specified. Can create processed datasets", "containing extracted data. \"\"\" directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path", "os.path.join(\"path\", file) df = pd.read_csv(path) df_processed = pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__", "df_dict = get_data_for_lang(language) if process == True: for file in os.listdir(path): path =", "pre_process_tweet import os import pandas as pd import argparse from distutils.sysconfig import get_python_lib", "if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) if process == True: for", "process flag is set as True Input-> language - a striing specifying the", "language you want the tweets in process - A Boolean to specify pre-processing", "as pd import argparse from distutils.sysconfig import get_python_lib def store_data(language, process = False):", "processed datasets if process flag is set as True Input-> language - a", "import argparse from distutils.sysconfig import get_python_lib def store_data(language, process = False): \"\"\" Cretaes", "get_python_lib def store_data(language, process = False): \"\"\" Cretaes a .csv file for the", "my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process == None: store_data(args.lang) else: store_data(args.lang,", "for file in os.listdir(path): path = os.path.join(\"path\", file) df = pd.read_csv(path) df_processed =", "a .csv file for the 
language specified. Can create processed datasets if process", "os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) if process ==", "from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as pd import argparse from", "import get_data_for_lang from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas as pd import", "pd import argparse from distutils.sysconfig import get_python_lib def store_data(language, process = False): \"\"\"", ".csv file for the language specified. Can create processed datasets if process flag", "parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False:", "my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool) args = my_parser.parse_args() if args.process == None:", "<filename>TLA/Data/get_data.py from TLA.Data.get_tweets import get_data_for_lang from TLA.Data.Pre_Process_Tweets import pre_process_tweet import os import pandas", "False): \"\"\" Cretaes a .csv file for the language specified. Can create processed", "action='store', type=bool) args = my_parser.parse_args() if args.process == None: store_data(args.lang) else: store_data(args.lang, args.process)", "= get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path)", "pre_process_tweet(df) df_processed.to_csv(path, sep=',', index=False) if __name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store',", "striing specifying the language you want the tweets in process - A Boolean", "- a dataframe containing extracted data. 
\"\"\" directory = \"datasets\" parent_dir = get_python_lib()", "Can create processed datasets if process flag is set as True Input-> language", "get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if os.path.isdir(path) == False: os.mkdir(path) df_dict", "directory = \"datasets\" parent_dir = get_python_lib() + \"/TLA/Data\" path = os.path.join(parent_dir, directory) if", "tweets in process - A Boolean to specify pre-processing tweets. Output-> x -", "Input-> language - a striing specifying the language you want the tweets in", "os.path.isdir(path) == False: os.mkdir(path) df_dict = get_data_for_lang(language) if process == True: for file", "process == True: for file in os.listdir(path): path = os.path.join(\"path\", file) df =", "- A Boolean to specify pre-processing tweets. Output-> x - a dataframe containing", "= False): \"\"\" Cretaes a .csv file for the language specified. Can create", "in process - A Boolean to specify pre-processing tweets. Output-> x - a", "import os import pandas as pd import argparse from distutils.sysconfig import get_python_lib def", "you want the tweets in process - A Boolean to specify pre-processing tweets.", "if __name__ == \"__main__\": my_parser = argparse.ArgumentParser() my_parser.add_argument('--lang', action='store', type=str) my_parser.add_argument('--process', action='store', type=bool)", "= get_data_for_lang(language) if process == True: for file in os.listdir(path): path = os.path.join(\"path\"," ]
[ "as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64,", ") as f: info = json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])):", "model = \"site\" obj = agent.site elif temp[0] == \"agent\": model = \"agent\"", "str, value: Any) -> str: if shell == \"cmd\": return \"1\" if value", "blank=True) filename = models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" )", "= [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name", "new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = (", "logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any) -> str:", "( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\",", "else: return \"\" @classmethod def load_community_scripts(cls): import json import os from pathlib import", "f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\",", "for arg in args: match = pattern.match(arg) if match: # only get the", "json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] )", ") default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() 
else 90 ) args", "\"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new community script: {script['name']}\")", "+= item + \",\" return temp_string.strip(\",\") else: # python temp_string = \"\" for", "\"powershell\": temp_string = \"\" for item in value: temp_string += item + \",\"", "elif shell == \"powershell\": temp_string = \"\" for item in value: temp_string +=", "database # skip ones that already exist, only updating name / desc in", "obj = agent else: # ignore arg since it is invalid continue if", "args=args, ).save() @staticmethod def serialize(script): # serializes the script and returns json from", "hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1])", "value = format_shell_array(shell, value) elif value and field.type == \"checkbox\": value = format_shell_bool(shell,", "as e: logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any)", "temp_string += item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) ->", "script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args = args", "import re from loguru import logger from typing import Any, List, Union from", "= getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value", "models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\",", "shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\"", "# check for model and property if len(temp) != 2: # ignore arg", "in 
value: temp_string += item + \",\" return temp_string.strip(\",\") else: # python temp_string", "check if value exists and if not use defa if value and field.type", "script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\",", "\"powershell\": return \"$True\" if value else \"$False\" else: # python return \"True\" if", "ones that already exist, only updating name / desc in case it changes", "self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import json", "= CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists():", "arg since property is invalid continue # replace the value in the arg", "if value else \"$False\" else: # python return \"True\" if value else \"False\"", "value) if not value: continue else: # ignore arg since property is invalid", "s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"] if \"category\" in script.keys()", "and second should be property temp = string.split(\".\") # check for model and", "\"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built", "import os from pathlib import Path from django.conf import settings # load community", "ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str, args: List[str] =", "value) elif value and field.type == \"checkbox\": value = format_shell_bool(shell, value) if not", "return [] temp_args = list() # pattern to match for injection pattern =", "# serializes the script and returns json from .serializers import ScriptSerializer return 
ScriptSerializer(script).data", "# type: ignore except Exception as e: logger.error(e) continue else: temp_args.append(arg) return temp_args", "\"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description", "desc in case it changes # for install script if not settings.DOCKER_BUILD: scripts_dir", "since it is invalid continue if temp[0] == \"client\": model = \"client\" obj", ") args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False)", "list() # pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in", "temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields", "match between the () in regex string = match.group(1) # split by period", "open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f) for script in info:", "for model and property if len(temp) != 2: # ignore arg since it", "def format_shell_bool(shell: str, value: Any) -> str: if shell == \"cmd\": return \"1\"", "\"args\", ] ) else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\")", "model and property if len(temp) != 2: # ignore arg since it is", "@staticmethod def serialize(script): # serializes the script and returns json from .serializers import", "() in regex string = match.group(1) # split by period if exists. 
First", "import Any, List, Union from django.conf import settings from django.contrib.postgres.fields import ArrayField from", "injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg) if match:", "only get the match between the () in regex string = match.group(1) #", "script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir", "and property if len(temp) != 2: # ignore arg since it is invalid", "model and second should be property temp = string.split(\".\") # check for model", "<gh_stars>0 import base64 import re from loguru import logger from typing import Any,", "\"agent\" obj = agent else: # ignore arg since it is invalid continue", "code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script):", "args: List[str] = list() ) -> Union[List[str], None]: from core.models import CustomField if", "= models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self):", "= re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg) if match: # only", "# replace the value in the arg and push to array # log", "== \"powershell\": temp_string = \"\" for item in value: temp_string += item +", "django.db import models from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\",", "max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, )", "code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return 
base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\"", "# only get the match between the () in regex string = match.group(1)", "model_fields = getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model:", "blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self): if self.code_base64:", "field.default_value # check if value exists and if not use defa if value", "ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category = models.CharField(max_length=100,", "into the database # skip ones that already exist, only updating name /", "for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg) if", "SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel):", "== \"cmd\": return \"array args are not supported with batch\" elif shell ==", "for install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker", "json import os from pathlib import Path from django.conf import settings # load", "typing import Any, List, Union from django.conf import settings from django.contrib.postgres.fields import ArrayField", "script[\"shell\"] i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f:", "\"$True\" if value else \"$False\" else: # python return \"True\" if value else", "since property is invalid continue # replace the value in the arg and", "serialize(script): # serializes the script and returns json from .serializers import ScriptSerializer return", "getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, 
name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\")", "os from pathlib import Path from django.conf import settings # load community uploaded", "= format_shell_array(shell, value) elif value and field.type == \"checkbox\": value = format_shell_bool(shell, value)", "s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category = category i.shell = script[\"shell\"]", "ignore except Exception as e: logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell:", "should be model and second should be property temp = string.split(\".\") # check", "= script[\"name\"] i.description = script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout =", "in regex string = match.group(1) # split by period if exists. First should", "load community uploaded scripts into the database # skip ones that already exist,", "is invalid continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists():", "open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 =", "str: if shell == \"cmd\": return \"1\" if value else \"0\" elif shell", "i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\")", "os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f) for script in info: if", "shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes the script", "import settings # load community uploaded scripts into the database # skip ones", "str: if shell == \"cmd\": return \"array args are not supported with batch\"", "args: 
match = pattern.match(arg) if match: # only get the match between the", "ArrayField from django.db import models from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\",", "# skip ones that already exist, only updating name / desc in case", "core.models import CustomField if not list: return [] temp_args = list() # pattern", "self.name @property def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\")", "import json import os from pathlib import Path from django.conf import settings #", "agent.client elif temp[0] == \"site\": model = \"site\" obj = agent.site elif temp[0]", "script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True,", "json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell:", "settings # load community uploaded scripts into the database # skip ones that", "script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes the script and", "= model_fields.get(**{model: obj}).value if not value and field.default_value: value = field.default_value # check", "= models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout =", ") -> Union[List[str], None]: from core.models import CustomField if not list: return []", "value: Any) -> str: if shell == \"cmd\": return \"1\" if value else", "continue else: # ignore arg since property is invalid continue # replace the", "import Path from django.conf import settings # load community uploaded scripts into the", "import logger from typing import Any, List, Union from django.conf import settings from", "= ( 
f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\",", "it changes # for install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\")", "( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90 ) args = script[\"args\"] if", "First should be model and second should be property temp = string.split(\".\") #", "updating name / desc in case it changes # for install script if", "import models from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch", "# for install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for", "in value: temp_string += item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value:", "models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return", "Exception as e: logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value:", "models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated shell = models.CharField(", "temp[0] == \"site\": model = \"site\" obj = agent.site elif temp[0] == \"agent\":", "scripts into the database # skip ones that already exist, only updating name", "\"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90 )", "and push to array # log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value,", "script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( 
script[\"category\"] if \"category\" in", "else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any) -> str: if shell", "default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes the script and returns json", "return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str: if shell == \"cmd\":", "settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f) for script", "string.split(\".\") # check for model and property if len(temp) != 2: # ignore", "be model and second should be property temp = string.split(\".\") # check for", "format_shell_bool(shell, value) if not value: continue else: # ignore arg since property is", "category = models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def", "property temp = string.split(\".\") # check for model and property if len(temp) !=", "\"scripts/community_scripts.json\") ) as f: info = json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir,", "i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding", "item in value: temp_string += item + \",\" return temp_string.strip(\",\") else: # python", "continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any) -> str: if", "null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64", "return self.name @property def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\",", "\"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import 
json import os from pathlib", "in script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys()", "else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes", "Any) -> str: if shell == \"cmd\": return \"1\" if value else \"0\"", "shell == \"cmd\": return \"array args are not supported with batch\" elif shell", "# check if value exists and if not use defa if value and", "arg since it is invalid continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1])", "skip ones that already exist, only updating name / desc in case it", "docker else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info", "base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod", "format_shell_bool(shell: str, value: Any) -> str: if shell == \"cmd\": return \"1\" if", "category = ( script[\"category\"] if \"category\" in script.keys() else \"Community\" ) default_timeout =", "parse_script_args( cls, agent, shell: str, args: List[str] = list() ) -> Union[List[str], None]:", "get the match between the () in regex string = match.group(1) # split", "name / desc in case it changes # for install script if not", "temp_string += item + \",\" return temp_string.strip(\",\") else: # python temp_string = \"\"", "return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import json import os", "and if not use defa if value and field.type == \"multiple\": value =", "+ \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str: 
if shell", "value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields =", "not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir = settings.SCRIPTS_DIR", ") favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True)", "os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"] if \"category\"", "name=script[\"name\"] ) category = ( script[\"category\"] if \"category\" in script.keys() else \"Community\" )", "cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def", "match = pattern.match(arg) if match: # only get the match between the ()", "match.group(1) # split by period if exists. First should be model and second", "base64 import re from loguru import logger from typing import Any, List, Union", "temp_args = list() # pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for", "ignore arg since property is invalid continue # replace the value in the", "obj = agent.client elif temp[0] == \"site\": model = \"site\" obj = agent.site", "[] temp_args = list() # pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\")", "model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not value and field.default_value: value =", "Union from django.conf import settings from django.contrib.postgres.fields import ArrayField from django.db import models", "if exists. 
First should be model and second should be property temp =", "invalid continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field", "None]: from core.models import CustomField if not list: return [] temp_args = list()", "CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value", "shell == \"cmd\": return \"1\" if value else \"0\" elif shell == \"powershell\":", "blank=True, default=list, ) favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64 =", "= format_shell_bool(shell, value) if not value: continue else: # ignore arg since property", "code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args,", "{script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") )", "to array # log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) #", "@classmethod def load_community_scripts(cls): import json import os from pathlib import Path from django.conf", "len(temp) != 2: # ignore arg since it is invalid continue if temp[0]", "\"1\" if value else \"0\" elif shell == \"powershell\": return \"$True\" if value", "import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str, args: List[str]", "return \"array args are not supported with batch\" elif shell == \"powershell\": temp_string", "open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( 
f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 =", ").save() @staticmethod def serialize(script): # serializes the script and returns json from .serializers", "as f: info = json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s", "else [] if s.exists(): i = s.first() i.name = script[\"name\"] i.description = script[\"description\"]", "script[\"category\"] if \"category\" in script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if", "os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\")", "install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else:", "split by period if exists. First should be model and second should be", "= default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes =", "= json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"]", "obj}).exists(): value = model_fields.get(**{model: obj}).value if not value and field.default_value: value = field.default_value", "by period if exists. 
First should be model and second should be property", "property if len(temp) != 2: # ignore arg since it is invalid continue", "name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value =", "if value else \"0\" elif shell == \"powershell\": return \"$True\" if value else", "[] if s.exists(): i = s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category", "category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]),", "temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any) -> str: if shell ==", "= os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR,", "community uploaded scripts into the database # skip ones that already exist, only", "# deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100,", "in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = (", "/ desc in case it changes # for install script if not settings.DOCKER_BUILD:", "i.name = script[\"name\"] i.description = script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout", "value else \"0\" elif shell == \"powershell\": return \"$True\" if value else \"$False\"", "scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f)", "value: temp_string += item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any)", "+ \",\" return temp_string.strip(\",\") else: # python temp_string = \"\" for item in", "\"scripts\") # for docker else: scripts_dir 
= settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") )", "if shell == \"cmd\": return \"1\" if value else \"0\" elif shell ==", "f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\",", "continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field =", "= agent.client elif temp[0] == \"site\": model = \"site\" obj = agent.site elif", "self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def", "cls, agent, shell: str, args: List[str] = list() ) -> Union[List[str], None]: from", "from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str,", "obj}).value if not value and field.default_value: value = field.default_value # check if value", "blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True)", "script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category =", "unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception as", ") i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\",", "django.conf import settings # load community uploaded scripts into the database # skip", "use defa if value and field.type == \"multiple\": value = format_shell_array(shell, value) elif", "models.TextField(null=True, blank=True) 
filename = models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\"", "# split by period if exists. First should be model and second should", "= \"client\" obj = agent.client elif temp[0] == \"site\": model = \"site\" obj", "models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True,", "if not value: continue else: # ignore arg since property is invalid continue", "\"\" for item in value: temp_string += item + \",\" return temp_string.strip(\",\") def", "category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes the script and returns", "agent else: # ignore arg since it is invalid continue if hasattr(obj, temp[1]):", "from typing import Any, List, Union from django.conf import settings from django.contrib.postgres.fields import", "arg)) # type: ignore except Exception as e: logger.error(e) continue else: temp_args.append(arg) return", "temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value", "== \"multiple\": value = format_shell_array(shell, value) elif value and field.type == \"checkbox\": value", "are not supported with batch\" elif shell == \"powershell\": temp_string = \"\" for", "default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite =", "if \"args\" in script.keys() else [] if s.exists(): i = s.first() i.name =", "@property def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else:", "default=list, ) favorite = models.BooleanField(default=False) category = 
models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True,", "else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90", ") else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f:", "= None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not value and", "[ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\",", "if len(temp) != 2: # ignore arg since it is invalid continue if", "elif temp[0] == \"site\": model = \"site\" obj = agent.site elif temp[0] ==", "field.type == \"multiple\": value = format_shell_array(shell, value) elif value and field.type == \"checkbox\":", "choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField(", "\",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str: if shell ==", "-> str: if shell == \"cmd\": return \"array args are not supported with", "continue # replace the value in the arg and push to array #", "= pattern.match(arg) if match: # only get the match between the () in", "elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value =", "in case it changes # for install script if not settings.DOCKER_BUILD: scripts_dir =", "the arg and push to array # log any unhashable type errors try:", "value: continue else: # ignore arg since property is invalid continue # replace", "== \"agent\": model = \"agent\" obj = agent else: # ignore arg since", "\"\" @classmethod def load_community_scripts(cls): import json import os from pathlib import Path from", "\"client\": model = 
\"client\" obj = agent.client elif temp[0] == \"site\": model =", "value exists and if not use defa if value and field.type == \"multiple\":", "\"multiple\": value = format_shell_array(shell, value) elif value and field.type == \"checkbox\": value =", "return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str, args: List[str] = list()", "( script[\"category\"] if \"category\" in script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"])", "models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args", "== \"checkbox\": value = format_shell_bool(shell, value) if not value: continue else: # ignore", "def format_shell_array(shell: str, value: Any) -> str: if shell == \"cmd\": return \"array", "Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated", "name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated shell", "(\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\",", "\"0\" elif shell == \"powershell\": return \"$True\" if value else \"$False\" else: #", "default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = (", "re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg) if match: # only get", "90 ) args = script[\"args\"] if \"args\" in script.keys() else [] if s.exists():", "= base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] )", "settings.DOCKER_BUILD: scripts_dir = 
os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir = settings.SCRIPTS_DIR with", "if not value and field.default_value: value = field.default_value # check if value exists", "temp[0] == \"agent\": model = \"agent\" obj = agent else: # ignore arg", "match: # only get the match between the () in regex string =", "] ) else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as", "# ignore arg since property is invalid continue # replace the value in", "= models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True,", "\"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir,", "if not use defa if value and field.type == \"multiple\": value = format_shell_array(shell,", "= models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated shell =", "\"category\" in script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in", "script[\"args\"] if \"args\" in script.keys() else [] if s.exists(): i = s.first() i.name", "str, args: List[str] = list() ) -> Union[List[str], None]: from core.models import CustomField", "return \"1\" if value else \"0\" elif shell == \"powershell\": return \"$True\" if", "else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info =", "exists. 
First should be model and second should be property temp = string.split(\".\")", "serializes the script and returns json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod", "\"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\",", "field.default_value: value = field.default_value # check if value exists and if not use", "base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else:", "[ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name =", "Any) -> str: if shell == \"cmd\": return \"array args are not supported", "Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description =", "community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\",", "except Exception as e: logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str,", "elif temp[0] == \"agent\": model = \"agent\" obj = agent else: # ignore", "= agent else: # ignore arg since it is invalid continue if hasattr(obj,", "not value: continue else: # ignore arg since property is invalid continue #", "code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def", "models from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"),", "should be property temp = string.split(\".\") # check for model and property if", 
"from django.db import models from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"),", "field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model:", "(\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ]", "= models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" )", "= base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save()", "obj = agent.site elif temp[0] == \"agent\": model = \"agent\" obj = agent", "value = None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not value", "for item in value: temp_string += item + \",\" return temp_string.strip(\",\") else: #", "\"checkbox\": value = format_shell_bool(shell, value) if not value: continue else: # ignore arg", "it is invalid continue if temp[0] == \"client\": model = \"client\" obj =", "from django.conf import settings # load community uploaded scripts into the database #", "the match between the () in regex string = match.group(1) # split by", "int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90 ) args = script[\"args\"] if \"args\"", "second should be property temp = string.split(\".\") # check for model and property", "load_community_scripts(cls): import json import os from pathlib import Path from django.conf import settings", "regex string = match.group(1) # split by period if exists. 
First should be", "value = model_fields.get(**{model: obj}).value if not value and field.default_value: value = field.default_value #", "choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite", "log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except", "\"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG)", "= script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args =", "default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self): if self.code_base64: base64_bytes", "format_shell_array(shell: str, value: Any) -> str: if shell == \"cmd\": return \"array args", "with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64", "arg in args: match = pattern.match(arg) if match: # only get the match", "only updating name / desc in case it changes # for install script", "= [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [", "= models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self):", "== \"client\": model = \"client\" obj = agent.client elif temp[0] == \"site\": model", "(CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"),", "elif value and field.type == \"checkbox\": value = format_shell_bool(shell, value) if not value:", "] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class", "to match for injection 
pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match =", "= self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import", "is invalid continue if temp[0] == \"client\": model = \"client\" obj = agent.client", "str, value: Any) -> str: if shell == \"cmd\": return \"array args are", "if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not value and field.default_value: value", "format_shell_array(shell, value) elif value and field.type == \"checkbox\": value = format_shell_bool(shell, value) if", "class Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) #", "arg and push to array # log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\",", "script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\")", "not use defa if value and field.type == \"multiple\": value = format_shell_array(shell, value)", "type: ignore except Exception as e: logger.error(e) continue else: temp_args.append(arg) return temp_args def", "List, Union from django.conf import settings from django.contrib.postgres.fields import ArrayField from django.db import", "= settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f) for", "CustomField if not list: return [] temp_args = list() # pattern to match", "if temp[0] == \"client\": model = \"client\" obj = agent.client elif temp[0] ==", "description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100,", "2: # ignore arg since it is invalid continue 
if temp[0] == \"client\":", "in script.keys() else [] if s.exists(): i = s.first() i.name = script[\"name\"] i.description", "models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self): if", "= ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"],", "\"\" for item in value: temp_string += item + \",\" return temp_string.strip(\",\") else:", "__str__(self): return self.name @property def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return", "for docker else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f:", "= list() # pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg", "= s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category = category i.shell =", "it is invalid continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model,", "model_fields.get(**{model: obj}).value if not value and field.default_value: value = field.default_value # check if", "script.keys() else 90 ) args = script[\"args\"] if \"args\" in script.keys() else []", "try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception as e: logger.error(e) continue", "script[\"name\"] i.description = script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout = default_timeout", "else: # python temp_string = \"\" for item in value: temp_string += item", "favorite = models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, 
blank=True) default_timeout", "as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[", "django.contrib.postgres.fields import ArrayField from django.db import models from logs.models import BaseAuditModel SCRIPT_SHELLS =", "agent, shell: str, args: List[str] = list() ) -> Union[List[str], None]: from core.models", "args are not supported with batch\" elif shell == \"powershell\": temp_string = \"\"", "if value and field.type == \"multiple\": value = format_shell_array(shell, value) elif value and", "\"array args are not supported with batch\" elif shell == \"powershell\": temp_string =", "(\"userdefined\", \"User Defined\"), (\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255)", "from core.models import CustomField if not list: return [] temp_args = list() #", "between the () in regex string = match.group(1) # split by period if", "f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"],", "script and returns json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args(", "max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args =", "exist, only updating name / desc in case it changes # for install", "= \"agent\" obj = agent else: # ignore arg since it is invalid", "type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception as e:", "return temp_string.strip(\",\") else: # python temp_string = \"\" for item in value: temp_string", "\"default_timeout\" in script.keys() else 90 ) args = script[\"args\"] if \"args\" 
in script.keys()", "for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category", "if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model,", "Union[List[str], None]: from core.models import CustomField if not list: return [] temp_args =", "not supported with batch\" elif shell == \"powershell\": temp_string = \"\" for item", "loguru import logger from typing import Any, List, Union from django.conf import settings", "base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls):", "args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category", "invalid continue # replace the value in the arg and push to array", "+= item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str:", "In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename", "else: # ignore arg since it is invalid continue if hasattr(obj, temp[1]): value", "item + \",\" return temp_string.strip(\",\") else: # python temp_string = \"\" for item", "in the arg and push to array # log any unhashable type errors", "elif shell == \"powershell\": return \"$True\" if value else \"$False\" else: # python", "def serialize(script): # serializes the script and returns json from .serializers import ScriptSerializer", "# ignore arg since it is invalid continue if hasattr(obj, temp[1]): value =", "= models.TextField(null=True, blank=True) filename = models.CharField(max_length=255) # deprecated shell = 
models.CharField( max_length=100, choices=SCRIPT_SHELLS,", "value in the arg and push to array # log any unhashable type", "returns json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent,", "\",\" return temp_string.strip(\",\") else: # python temp_string = \"\" for item in value:", "with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as f: info = json.load(f) for script in", "push to array # log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg))", "changes # for install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") #", "\"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new community", "@classmethod def parse_script_args( cls, agent, shell: str, args: List[str] = list() ) ->", "not value and field.default_value: value = field.default_value # check if value exists and", "f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\",", "arg since it is invalid continue if temp[0] == \"client\": model = \"client\"", "supported with batch\" elif shell == \"powershell\": temp_string = \"\" for item in", "return temp_args def format_shell_array(shell: str, value: Any) -> str: if shell == \"cmd\":", "uploaded scripts into the database # skip ones that already exist, only updating", "= script[\"shell\"] i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as", "= ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90 ) args = script[\"args\"]", "\"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) 
code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls(", "\"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new community script:", "check for model and property if len(temp) != 2: # ignore arg since", "period if exists. First should be model and second should be property temp", "if \"category\" in script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\"", "!= 2: # ignore arg since it is invalid continue if temp[0] ==", "(\"builtin\", \"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True,", "\"args\" in script.keys() else [] if s.exists(): i = s.first() i.name = script[\"name\"]", ") category = ( script[\"category\"] if \"category\" in script.keys() else \"Community\" ) default_timeout", "models.BooleanField(default=False) category = models.CharField(max_length=100, null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90)", "the database # skip ones that already exist, only updating name / desc", "\"cmd\": return \"array args are not supported with batch\" elif shell == \"powershell\":", "blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name @property", "description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes", "item in value: temp_string += item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str,", "logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"),", 
"django.conf import settings from django.contrib.postgres.fields import ArrayField from django.db import models from logs.models", ") script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True),", "array # log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type:", "== \"site\": model = \"site\" obj = agent.site elif temp[0] == \"agent\": model", "i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save( update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ]", "(\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User", "default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else 90 ) args =", ") code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout,", "the script and returns json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def", ") args = script[\"args\"] if \"args\" in script.keys() else [] if s.exists(): i", "from logs.models import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\",", "if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod", "invalid continue if temp[0] == \"client\": model = \"client\" obj = agent.client elif", "temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception as e: logger.error(e) continue else:", "batch\" elif 
shell == \"powershell\": temp_string = \"\" for item in value: temp_string", "value, arg)) # type: ignore except Exception as e: logger.error(e) continue else: temp_args.append(arg)", "model = \"agent\" obj = agent else: # ignore arg since it is", "from django.conf import settings from django.contrib.postgres.fields import ArrayField from django.db import models from", "list() ) -> Union[List[str], None]: from core.models import CustomField if not list: return", "temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str: if shell == \"cmd\": return", "else \"0\" elif shell == \"powershell\": return \"$True\" if value else \"$False\" else:", "print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes =", "item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell: str, value: Any) -> str: if", "in args: match = pattern.match(arg) if match: # only get the match between", "replace the value in the arg and push to array # log any", "script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\")", "import settings from django.contrib.postgres.fields import ArrayField from django.db import models from logs.models import", "value = field.default_value # check if value exists and if not use defa", "value and field.default_value: value = field.default_value # check if value exists and if", "temp = string.split(\".\") # check for model and property if len(temp) != 2:", "value and field.type == \"multiple\": value = format_shell_array(shell, value) elif value and field.type", "f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not", "value and field.type == \"checkbox\": value = format_shell_bool(shell, value) if not value: continue", "script.keys() else [] if 
s.exists(): i = s.first() i.name = script[\"name\"] i.description =", "-> Union[List[str], None]: from core.models import CustomField if not list: return [] temp_args", "string = match.group(1) # split by period if exists. First should be model", "with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64", "e: logger.error(e) continue else: temp_args.append(arg) return temp_args def format_shell_array(shell: str, value: Any) ->", "if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"] if", "i.description = script[\"description\"] i.category = category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args", "if value exists and if not use defa if value and field.type ==", "python temp_string = \"\" for item in value: temp_string += item + \",\"", "for item in value: temp_string += item + \",\" return temp_string.strip(\",\") def format_shell_bool(shell:", "= category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir,", "= \"site\" obj = agent.site elif temp[0] == \"agent\": model = \"agent\" obj", "== \"powershell\": return \"$True\" if value else \"$False\" else: # python return \"True\"", "s.exists(): i = s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category = category", "with batch\" elif shell == \"powershell\": temp_string = \"\" for item in value:", "# load community uploaded scripts into the database # skip ones that already", "defa if value and field.type == \"multiple\": value = format_shell_array(shell, value) elif value", "= script[\"args\"] if \"args\" in script.keys() else [] if s.exists(): i = s.first()", "# pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args:", "] 
logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename =", "models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list,", "pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg) if match: #", "BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES", "return \"$True\" if value else \"$False\" else: # python return \"True\" if value", "ignore arg since it is invalid continue if temp[0] == \"client\": model =", "from django.contrib.postgres.fields import ArrayField from django.db import models from logs.models import BaseAuditModel SCRIPT_SHELLS", "Any, List, Union from django.conf import settings from django.contrib.postgres.fields import ArrayField from django.db", "model = \"client\" obj = agent.client elif temp[0] == \"site\": model = \"site\"", "import CustomField if not list: return [] temp_args = list() # pattern to", "= cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"] if \"category\" in script.keys() else", "SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES =", "= models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type =", "already exist, only updating name / desc in case it changes # for", "since it is invalid continue if hasattr(obj, temp[1]): value = getattr(obj, temp[1]) elif", "import base64 import re from loguru import logger from typing import Any, List,", "\"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, 
name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category,", "if \"default_timeout\" in script.keys() else 90 ) args = script[\"args\"] if \"args\" in", "settings from django.contrib.postgres.fields import ArrayField from django.db import models from logs.models import BaseAuditModel", "args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") )", "is invalid continue # replace the value in the arg and push to", "field.type == \"checkbox\": value = format_shell_bool(shell, value) if not value: continue else: #", "pathlib import Path from django.conf import settings # load community uploaded scripts into", "\"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new community script: {script['name']}\") with", "if match: # only get the match between the () in regex string", "and field.default_value: value = field.default_value # check if value exists and if not", "import BaseAuditModel SCRIPT_SHELLS = [ (\"powershell\", \"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ]", "return \"\" @classmethod def load_community_scripts(cls): import json import os from pathlib import Path", "update_fields=[ \"name\", \"description\", \"category\", \"default_timeout\", \"code_base64\", \"shell\", \"args\", ] ) else: print(f\"Adding new", "default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES, default=\"userdefined\" ) args = ArrayField( models.TextField(null=True,", "logger from typing import Any, List, Union from django.conf import settings from django.contrib.postgres.fields", "script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\")", "import ArrayField from django.db import 
models from logs.models import BaseAuditModel SCRIPT_SHELLS = [", "= agent.site elif temp[0] == \"agent\": model = \"agent\" obj = agent else:", "f: info = json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s =", "filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): # serializes the", "and field.type == \"multiple\": value = format_shell_array(shell, value) elif value and field.type ==", "Path from django.conf import settings # load community uploaded scripts into the database", "= ( script[\"category\"] if \"category\" in script.keys() else \"Community\" ) default_timeout = (", "list: return [] temp_args = list() # pattern to match for injection pattern", "the value in the arg and push to array # log any unhashable", "getattr(field, f\"{model}_fields\") value = None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if", "script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"],", "i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\",", "models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField(", "name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"], script_type=\"builtin\", category=category, default_timeout=default_timeout, args=args, ).save() @staticmethod def serialize(script): #", "def code(self): if self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\") return 
base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return", "not list: return [] temp_args = list() # pattern to match for injection", "value = format_shell_bool(shell, value) if not value: continue else: # ignore arg since", "pattern.match(arg) if match: # only get the match between the () in regex", "pattern to match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match", "== \"cmd\": return \"1\" if value else \"0\" elif shell == \"powershell\": return", "i = s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category = category i.shell", "None if model_fields.filter(**{model: obj}).exists(): value = model_fields.get(**{model: obj}).value if not value and field.default_value:", "# python temp_string = \"\" for item in value: temp_string += item +", "i.default_timeout = default_timeout i.args = args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes", "shell == \"powershell\": temp_string = \"\" for item in value: temp_string += item", "# log any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore", "= \"\" for item in value: temp_string += item + \",\" return temp_string.strip(\",\")", "value: temp_string += item + \",\" return temp_string.strip(\",\") else: # python temp_string =", "cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"] if \"category\" in script.keys() else \"Community\"", "value: Any) -> str: if shell == \"cmd\": return \"array args are not", "= getattr(obj, temp[1]) elif CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field,", "temp_string.strip(\",\") else: # python temp_string = \"\" for item in value: temp_string +=", "logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = 
models.CharField(max_length=255) description = models.TextField(null=True, blank=True) filename = models.CharField(max_length=255)", "\"Powershell\"), (\"cmd\", \"Batch (CMD)\"), (\"python\", \"Python\"), ] SCRIPT_TYPES = [ (\"userdefined\", \"User Defined\"),", "= field.default_value # check if value exists and if not use defa if", "-> str: if shell == \"cmd\": return \"1\" if value else \"0\" elif", "else 90 ) args = script[\"args\"] if \"args\" in script.keys() else [] if", "scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir = settings.SCRIPTS_DIR with open(", "exists and if not use defa if value and field.type == \"multiple\": value", "deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type = models.CharField( max_length=100, choices=SCRIPT_TYPES,", "agent.site elif temp[0] == \"agent\": model = \"agent\" obj = agent else: #", "= string.split(\".\") # check for model and property if len(temp) != 2: #", "match for injection pattern = re.compile(\".*\\\\{\\\\{(.*)\\\\}\\\\}.*\") for arg in args: match = pattern.match(arg)", "\"agent\": model = \"agent\" obj = agent else: # ignore arg since it", "( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") cls( code_base64=code_base64, name=script[\"name\"], description=script[\"description\"], filename=script[\"filename\"], shell=script[\"shell\"],", "and returns json from .serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls,", "def load_community_scripts(cls): import json import os from pathlib import Path from django.conf import", "from pathlib import Path from django.conf import settings # load community uploaded scripts", "if not list: return [] temp_args = list() # pattern to match for", "\"site\": model = \"site\" obj = agent.site elif temp[0] == \"agent\": model =", 
"\"cmd\": return \"1\" if value else \"0\" elif shell == \"powershell\": return \"$True\"", "\"ignore\") return base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import json import", "= match.group(1) # split by period if exists. First should be model and", "be property temp = string.split(\".\") # check for model and property if len(temp)", "temp[0] == \"client\": model = \"client\" obj = agent.client elif temp[0] == \"site\":", "base64.b64decode(base64_bytes).decode(\"ascii\", \"ignore\") else: return \"\" @classmethod def load_community_scripts(cls): import json import os from", "filename = models.CharField(max_length=255) # deprecated shell = models.CharField( max_length=100, choices=SCRIPT_SHELLS, default=\"powershell\" ) script_type", "re from loguru import logger from typing import Any, List, Union from django.conf", "\"shell\", \"args\", ] ) else: print(f\"Adding new community script: {script['name']}\") with open(os.path.join(scripts_dir, script[\"filename\"]),", "= args with open(os.path.join(scripts_dir, script[\"filename\"]), \"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\")", "= list() ) -> Union[List[str], None]: from core.models import CustomField if not list:", "info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter( name=script[\"name\"] ) category = ( script[\"category\"]", "CustomField.objects.filter(model=model, name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None", "and field.type == \"checkbox\": value = format_shell_bool(shell, value) if not value: continue else:", "= ArrayField( models.TextField(null=True, blank=True), null=True, blank=True, default=list, ) favorite = models.BooleanField(default=False) category =", "that already exist, only updating name / desc in 
case it changes #", "null=True, blank=True) code_base64 = models.TextField(null=True, blank=True) default_timeout = models.PositiveIntegerField(default=90) def __str__(self): return self.name", "args = script[\"args\"] if \"args\" in script.keys() else [] if s.exists(): i =", "List[str] = list() ) -> Union[List[str], None]: from core.models import CustomField if not", "name=temp[1]).exists(): field = CustomField.objects.get(model=model, name=temp[1]) model_fields = getattr(field, f\"{model}_fields\") value = None if", "continue if temp[0] == \"client\": model = \"client\" obj = agent.client elif temp[0]", "the () in regex string = match.group(1) # split by period if exists.", "from loguru import logger from typing import Any, List, Union from django.conf import", "ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str, args: List[str] = list() )", "i.category = category i.shell = script[\"shell\"] i.default_timeout = default_timeout i.args = args with", "in script.keys() else 90 ) args = script[\"args\"] if \"args\" in script.keys() else", "# ignore arg since it is invalid continue if temp[0] == \"client\": model", "property is invalid continue # replace the value in the arg and push", "def parse_script_args( cls, agent, shell: str, args: List[str] = list() ) -> Union[List[str],", "info = json.load(f) for script in info: if os.path.exists(os.path.join(scripts_dir, script[\"filename\"])): s = cls.objects.filter(script_type=\"builtin\").filter(", "if s.exists(): i = s.first() i.name = script[\"name\"] i.description = script[\"description\"] i.category =", "if shell == \"cmd\": return \"array args are not supported with batch\" elif", "shell: str, args: List[str] = list() ) -> Union[List[str], None]: from core.models import", "case it changes # for install script if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1],", "def __str__(self): return self.name @property def code(self): if 
self.code_base64: base64_bytes = self.code_base64.encode(\"ascii\", \"ignore\")", "\"client\" obj = agent.client elif temp[0] == \"site\": model = \"site\" obj =", "\"rb\") as f: script_bytes = ( f.read().decode(\"utf-8\").encode(\"ascii\", \"ignore\") ) i.code_base64 = base64.b64encode(script_bytes).decode(\"ascii\") i.save(", "temp_args def format_shell_array(shell: str, value: Any) -> str: if shell == \"cmd\": return", "any unhashable type errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception", "shell == \"powershell\": return \"$True\" if value else \"$False\" else: # python return", "# for docker else: scripts_dir = settings.SCRIPTS_DIR with open( os.path.join(settings.BASE_DIR, \"scripts/community_scripts.json\") ) as", ".serializers import ScriptSerializer return ScriptSerializer(script).data @classmethod def parse_script_args( cls, agent, shell: str, args:", "= models.PositiveIntegerField(default=90) def __str__(self): return self.name @property def code(self): if self.code_base64: base64_bytes =", "script.keys() else \"Community\" ) default_timeout = ( int(script[\"default_timeout\"]) if \"default_timeout\" in script.keys() else", "temp_string = \"\" for item in value: temp_string += item + \",\" return", "\"site\" obj = agent.site elif temp[0] == \"agent\": model = \"agent\" obj =", "\"Built In\"), ] logger.configure(**settings.LOG_CONFIG) class Script(BaseAuditModel): name = models.CharField(max_length=255) description = models.TextField(null=True, blank=True)", "if not settings.DOCKER_BUILD: scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], \"scripts\") # for docker else: scripts_dir =", "ignore arg since it is invalid continue if hasattr(obj, temp[1]): value = getattr(obj,", "else: # ignore arg since property is invalid continue # replace the value", "errors try: temp_args.append(re.sub(\"\\\\{\\\\{.*\\\\}\\\\}\", value, arg)) # type: ignore except Exception as e: logger.error(e)" ]
[ "MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [ \"Message\",", "MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [ \"Message\", \"MessageDefinition\", \"MessageDefinitionStore\", \"MessageIdStore\", ]", "handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore", "Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt", "# For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to", "pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store", "import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import", "Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\" from pylint.message.message import", "to Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store", "https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related", "For details: 
https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message", "related to Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from", "details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\"", "# Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\" from pylint.message.message", "https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\" from", "\"\"\"All the classes related to Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition", "https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\" from pylint.message.message import Message from", "GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes", "# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c)", "the classes related to Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import", "classes related to Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition", "(c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the classes related to Message handling.\"\"\" from 
pylint.message.message import Message", "Message handling.\"\"\" from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import", "from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__", "<gh_stars>0 # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright", "from pylint.message.message import Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from", "from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [ \"Message\", \"MessageDefinition\",", "pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [ \"Message\", \"MessageDefinition\", \"MessageDefinitionStore\",", "under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All", "pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ =", "import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [", "Message from pylint.message.message_definition import MessageDefinition from pylint.message.message_definition_store import MessageDefinitionStore from pylint.message.message_id_store import 
MessageIdStore", "the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html # For details: https://github.com/PyCQA/pylint/blob/main/LICENSE # Copyright (c) https://github.com/PyCQA/pylint/blob/main/CONTRIBUTORS.txt \"\"\"All the", "import MessageDefinitionStore from pylint.message.message_id_store import MessageIdStore __all__ = [ \"Message\", \"MessageDefinition\", \"MessageDefinitionStore\", \"MessageIdStore\"," ]
[ "- res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, '", "os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse:", "import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test):", "save_path = os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" %", "= np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in", "== len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\":", "all methods are applicable in all scenarios methods = [] # baseline methods", "break else: pass for method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name +\"", "[] # baseline methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods", "methods are applicable in all scenarios methods = [] # baseline methods methods", "mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse]", "% (method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name = type(model).__name__", "scenarios methods = [] # baseline methods methods += [(\"DirectNN\", DirectNN())] methods +=", "eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean()) return", "np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name =", "range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = 
np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows))", "import random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test -", "in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std =", "random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g)", "times2 = [] for method_name, method in methods: # print(\"Running \" + method_name", "+= [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s,", "methods += [(\"DeepIV\", DeepIV())] if training: if rep < repid: continue elif rep", "MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x)", "+= [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if", "s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows", "test) model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\" % (model_type_name, test_mse)) else:", "2).mean()) return mse def save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y,", "AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if rep < repid: continue elif", "print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\"", "rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat']", "[(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += 
[(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\",", "import torch, add_path import numpy as np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN,", "repid, training=True) rows = [] for i in range(len(scenarios)): s = scenarios[i] means,times", "exist_ok=True) means = [] times = [] for rep in range(num_reps): # Not", "= np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2", "os.path.exists(time_path): res = np.load(time_path) times2 += [res] else: print(time_path, ' not exists') if", "+= [(\"DeepIV\", DeepIV())] if training: if rep < repid: continue elif rep >repid:", "np.load(time_path) times2 += [res] else: print(time_path, ' not exists') if len(means2) == len(methods):", "= float(((g_pred_test - test.g) ** 2).mean()) return mse def save_model(model, save_path, test): g_pred", "= type(model).__name__ print(\"Test MSE of %s: %f\" % (model_type_name, test_mse)) else: means2 =", "training=True) rows = [] for i in range(len(scenarios)): s = scenarios[i] means,times =", "0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2, 1]]]", "AGMM import os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527)", "i, j) for s in [8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16,", "+= [res] else: print(time_path, ' not exists') if len(means2) == len(methods): means +=", "model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random", "(method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path) mse =", "baseline methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\",", "= folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res = 
np.load(time_path) times2 += [res]", "({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows =", "(method_name, rep) save_path = os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z, None)", "\" + str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name)", "# methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y", "== \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for i,j", "+ str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) if", "in methods: # print(\"Running \" + method_name +\" \" + str(rep)) file_name =", "test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder = ROOT_PATH", "for mid in range(6): for repid in range(10): run_experiment(sce, mid, repid, training=True) rows", "training: if rep < repid: continue elif rep >repid: break else: pass for", "run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test", "seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH", "= np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T])) print('Tabulate", "num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test =", "= [] # 
baseline methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())]", "for method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name +\" \" + str(rep))", "__name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for", "i in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std", "1],[2, 1]]] for sce in scenarios: for mid in range(6): for repid in", "Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os import tensorflow from MMR_IVs.util", "torch, add_path import numpy as np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\", "+ str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) model,", "[\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16,", "in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5,", "[(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if rep < repid: continue", "in range(6): for repid in range(10): run_experiment(sce, mid, repid, training=True) rows = []", "means += [means2] if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return", "from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test): g_pred_test =", "= eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\" % (model_type_name,", "n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if rep", "def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev,", "os.path.join(folder, file_name) 
if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) **", "in range(num_reps): # Not all methods are applicable in all scenarios methods =", "Not all methods are applicable in all scenarios methods = [] # baseline", "tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder", "= np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0))", "if training: if rep < repid: continue elif rep >repid: break else: pass", "file_name) model, time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model,", "print(time_path, ' not exists') if len(means2) == len(methods): means += [means2] if len(times2)", "in all scenarios methods = [] # baseline methods methods += [(\"DirectNN\", DirectNN())]", "(method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test", "x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed)", "= [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for i,j in [[1,1]]] scenarios", "np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os", "+= [mse] else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep)", "rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods", "method in methods: # print(\"Running \" + method_name +\" \" + str(rep)) file_name", "range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) 
mean = np.mean(means,axis=0) std = np.std(means,axis=0)", "methods += [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())]", "[] times2 = [] for method_name, method in methods: # print(\"Running \" +", "[times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i,", "j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in", "g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train,", "str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path):", "MSE of %s: %f\" % (model_type_name, test_mse)) else: means2 = [] times2 =", "std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time:", "',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd,", "for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in", "for method_name, method in methods: # print(\"Running \" + method_name +\" \" +", "random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" +", "%f\" % (model_type_name, test_mse)) else: means2 = [] times2 = [] for method_name,", "+= [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods +=", "rep >repid: break else: pass for method_name, method in methods[mid:mid+1]: 
print(\"Running \" +", "methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())]", "repid: continue elif rep >repid: break else: pass for method_name, method in methods[mid:mid+1]:", "exists') if len(means2) == len(methods): means += [means2] if len(times2) == len(methods): times", "for i in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0)", "in [[0.5, 1],[2, 1]]] for sce in scenarios: for mid in range(6): for", "np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]]", "= load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder = ROOT_PATH +", "import os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def", "mse = float(((g_pred_test - test.g) ** 2).mean()) return mse def save_model(model, save_path, test):", "range(10): run_experiment(sce, mid, repid, training=True) rows = [] for i in range(len(scenarios)): s", "1]]] for sce in scenarios: for mid in range(6): for repid in range(10):", "float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path,", "add_path import numpy as np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM,", "model, time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path,", "= \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res =", "means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in", "model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\" % (model_type_name, test_mse)) else: means2", "\"/results/mendelian/\"+scenario_name+\"/\" 
os.makedirs(folder, exist_ok=True) means = [] times = [] for rep in range(num_reps):", "save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10,", "2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2, 1]]] for", "% (method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path) mse", "in methods[mid:mid+1]: print(\"Running \" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\"", "in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1, 0.5],[1,", "torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') #", "np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed", "[(\"DeepIV\", DeepIV())] if training: if rep < repid: continue elif rep >repid: break", "not exists') if len(means2) == len(methods): means += [means2] if len(times2) == len(methods):", "= \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) model, time = method.fit(train.x,", "GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if", "DeepIV())] if training: if rep < repid: continue elif rep >repid: break else:", "GMM, DeepIV, AGMM import os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import", "if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32]", "in [8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i,", "test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, 
num_reps=10, seed=527,training=False):", "= model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set", "[] for method_name, method in methods: # print(\"Running \" + method_name +\" \"", "for rep in range(num_reps): # Not all methods are applicable in all scenarios", "[8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j", "return means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s", "train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test)", "[] for i in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean =", "from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os import", "= os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true'])", "import numpy as np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV,", "+= [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1, 0.5],[1, 2]]] scenarios +=", "Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os import tensorflow from MMR_IVs.util import", "# print(\"Running \" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" %", "%s: %f\" % (model_type_name, test_mse)) else: means2 = [] times2 = [] for", "+= [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if rep < repid:", "- test.g) ** 2).mean()) return mse def save_model(model, save_path, test): g_pred = model.predict(test.x)", "exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res = np.load(time_path) times2", "DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] 
methods += [(\"GMM\", GMM(g_model=\"2-layer\",", "# set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH +", "rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for", "y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T])) print('Tabulate Table:') # print(tabulate(np.vstack((np.append([\"\"],scenarios),rows)), headers='firstrow',tablefmt='latex'))", "set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\"", "if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) #", "np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n'", "{}'.format(mse)) means2 += [mse] else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" %", "j) for i, j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for", "+= [means2] if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times", "# baseline methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods +=", "else: means2 = [] times2 = [] for method_name, method in methods: #", "+- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows", "** 2).mean()) return mse def save_model(model, save_path, test): 
g_pred = model.predict(test.x) np.savez(save_path, x=test.w,", "mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j", "os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def eval_model(model,", "in range(10): run_experiment(sce, mid, repid, training=True) rows = [] for i in range(len(scenarios)):", "% (model_type_name, test_mse)) else: means2 = [] times2 = [] for method_name, method", "load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\"", "[res] else: print(time_path, ' not exists') if len(means2) == len(methods): means += [means2]", "= ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times = [] for", "res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, ' not", "[\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for i,j in [[1,1]]] scenarios +=", "[mse] else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if", "load_data import random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test", "= method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse", "np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 +=", "range(6): for repid in range(10): run_experiment(sce, mid, repid, training=True) rows = [] for", "rep < repid: continue elif rep >repid: break else: pass for method_name, method", "else: print(time_path, ' not exists') if len(means2) == len(methods): means += [means2] if", "save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, 
g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid,", "tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test): g_pred_test", "in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows =", "\\ GMM, DeepIV, AGMM import os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data", "test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean()) return mse", "test) test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\"", "\"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path)", "# result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = []", "test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\" %", "+\" \" + str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder,", "DeepIV, AGMM import os import tensorflow from MMR_IVs.util import ROOT_PATH, load_data import random", "g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean()) return mse def", "ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times = [] for rep", ">repid: break else: pass for method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name", "[[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2,", "scenario_name+'.npz') # result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means =", "+= [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", 
n_steps=20))] methods += [(\"AGMM\", AGMM())] methods", "range(num_reps): # Not all methods are applicable in all scenarios methods = []", "def eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean())", "file_name) if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean())", "j)for i, j in [[0.5, 1],[2, 1]]] for sce in scenarios: for mid", "elif rep >repid: break else: pass for method_name, method in methods[mid:mid+1]: print(\"Running \"", "g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): #", "times2 += [res] else: print(time_path, ' not exists') if len(means2) == len(methods): means", "Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\",", "times = [] for rep in range(num_reps): # Not all methods are applicable", "dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder =", "method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name +\" \" + str(rep)) file_name", "print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res", "import ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse", "\"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) model, time = method.fit(train.x, train.y,", "save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of", "2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, ' not exists') time_path", "return mse def save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, 
y=test.y, g_true=test.g,", "j in [[0.5, 1],[2, 1]]] for sce in scenarios: for mid in range(6):", "None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name", "% (method_name, rep) if os.path.exists(time_path): res = np.load(time_path) times2 += [res] else: print(time_path,", "methods: # print(\"Running \" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\"", "= float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else:", "# Not all methods are applicable in all scenarios methods = [] #", "method_name, method in methods: # print(\"Running \" + method_name +\" \" + str(rep))", "(model_type_name, test_mse)) else: means2 = [] times2 = [] for method_name, method in", "' not exists') if len(means2) == len(methods): means += [means2] if len(times2) ==", "len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ ==", "bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T])) print('Tabulate Table:') # print(tabulate(np.vstack((np.append([\"\"],scenarios),rows)),", "+= [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods", "np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T])) print('Tabulate Table:')", "' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res =", "model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean()) return mse def save_model(model, save_path,", "means2 += [mse] else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name,", "i, j)for i, j in [[0.5, 1],[2, 1]]] for sce in 
scenarios: for", "\" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep)", "rep) save_path = os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\"", "DirectNN, \\ GMM, DeepIV, AGMM import os import tensorflow from MMR_IVs.util import ROOT_PATH,", "result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times", "for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows)", "len(means2) == len(methods): means += [means2] if len(times2) == len(methods): times += [times2]", "time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res = np.load(time_path) times2 +=", "[[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None]", "os.makedirs(folder, exist_ok=True) means = [] times = [] for rep in range(num_reps): #", "= np.load(time_path) times2 += [res] else: print(time_path, ' not exists') if len(means2) ==", "(method_name, rep) if os.path.exists(time_path): res = np.load(time_path) times2 += [res] else: print(time_path, '", "= [] for method_name, method in methods: # print(\"Running \" + method_name +\"", "res = np.load(save_path) mse = float(((res['g_hat'] - res['g_true']) ** 2).mean()) # print('mse: {}'.format(mse))", "else: print(save_path, ' not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path):", "i, j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j", "os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z, None) 
np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time)", "#print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for", "= run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j])", "#print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j)", "= [] for rep in range(num_reps): # Not all methods are applicable in", "time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test)", "train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder", "repid in range(10): run_experiment(sce, mid, repid, training=True) rows = [] for i in", "[(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods +=", "np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz') # result", "rep),time) save_model(model, save_path, test) test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE", "methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y", "#rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T]))", "+= [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods =", 
"as np from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import", "methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())]", "i, j in [[0.5, 1],[2, 1]]] for sce in scenarios: for mid in", "for sce in scenarios: for mid in range(6): for repid in range(10): run_experiment(sce,", "res = np.load(time_path) times2 += [res] else: print(time_path, ' not exists') if len(means2)", "method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path =", "test_mse)) else: means2 = [] times2 = [] for method_name, method in methods:", "in scenarios: for mid in range(6): for repid in range(10): run_experiment(sce, mid, repid,", "rep in range(num_reps): # Not all methods are applicable in all scenarios methods", "% (method_name, rep) save_path = os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z,", "j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows", "file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res", "if os.path.exists(time_path): res = np.load(time_path) times2 += [res] else: print(time_path, ' not exists')", "+= [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2, 1]]] for sce in", "test.g) ** 2).mean()) return mse def save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path,", "len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios", "methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))]", "[(\"Poly2SLS\", Poly2SLS())] 
methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods +=", "mid, repid, training=True) rows = [] for i in range(len(scenarios)): s = scenarios[i]", "= [] times2 = [] for method_name, method in methods: # print(\"Running \"", "mid in range(6): for repid in range(10): run_experiment(sce, mid, repid, training=True) rows =", "[] times = [] for rep in range(num_reps): # Not all methods are", "== len(methods): means += [means2] if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0))", "[] for rep in range(num_reps): # Not all methods are applicable in all", "all scenarios methods = [] # baseline methods methods += [(\"DirectNN\", DirectNN())] methods", "np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for j in range(len(mean))]] print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) #", "run_experiment(sce, mid, repid, training=True) rows = [] for i in range(len(scenarios)): s =", "scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f})", "save_path, test) test_mse = eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of %s:", "methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\", Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods", "def save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred) def", "= os.path.join(folder, file_name) model, time = method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name,", "applicable in all scenarios methods = [] # baseline methods methods += [(\"DirectNN\",", "Vanilla2SLS())] methods += [(\"Poly2SLS\", Poly2SLS())] methods += [(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\",", "method in methods[mid:mid+1]: 
print(\"Running \" + method_name +\" \" + str(rep)) file_name =", "< repid: continue elif rep >repid: break else: pass for method_name, method in", "methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training: if rep <", "float(((g_pred_test - test.g) ** 2).mean()) return mse def save_model(model, save_path, test): g_pred =", "= model.predict(test.x) mse = float(((g_pred_test - test.g) ** 2).mean()) return mse def save_model(model,", "pass for method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name +\" \" +", "method.fit(train.x, train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse =", "[[0.5, 1],[2, 1]]] for sce in scenarios: for mid in range(6): for repid", "[means2] if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if", "rows = [] for i in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False)", "+ \"/data/mendelian/\" + scenario_name+'.npz') # result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder,", "s in [8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for", "ROOT_PATH, load_data import random random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse =", "random.seed(527) def eval_model(model, test): g_pred_test = model.predict(test.x) mse = float(((g_pred_test - test.g) **", "print('time: ',np.mean(times,axis=0),np.std(times,axis=0)) # methods = np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error", "= np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e", "numpy as np 
from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM", "[[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1, 0.5],[1, 2]]]", "g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed)", "scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for i,j in [[1,1]]]", "[\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2, 1]]] for sce in scenarios:", "# print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, ' not exists') time_path =", "scenarios: for mid in range(6): for repid in range(10): run_experiment(sce, mid, repid, training=True)", "not exists') time_path = folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res = np.load(time_path)", "mse def save_model(model, save_path, test): g_pred = model.predict(test.x) np.savez(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred)", "else: pass for method_name, method in methods[mid:mid+1]: print(\"Running \" + method_name +\" \"", "len(methods): means += [means2] if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0))", "folder+\"%s_%d_time.npy\" % (method_name, rep) if os.path.exists(time_path): res = np.load(time_path) times2 += [res] else:", "str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) model, time", "for s in [8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)", "sce in scenarios: for mid in range(6): for repid in range(10): run_experiment(sce, mid,", "print(\"Running \" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" % (method_name,", "if len(times2) == len(methods): times += [times2] #print('means',np.mean(np.array(means),axis=0)) 
#print('std',np.std(np.array(means),axis=0)) return means,times if __name__", "baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os import tensorflow", "folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times =", "+ scenario_name+'.npz') # result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means", "+ \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times = [] for rep in", "seed torch.manual_seed(seed) np.random.seed(seed) tensorflow.set_random_seed(seed) train, dev, test = load_data(ROOT_PATH + \"/data/mendelian/\" + scenario_name+'.npz')", "= [] times = [] for rep in range(num_reps): # Not all methods", "i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1,", "\"/data/mendelian/\" + scenario_name+'.npz') # result folder folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True)", "= [] for i in range(len(scenarios)): s = scenarios[i] means,times = run_experiment(s,0,0,training=False) mean", "for i, j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i,", "folder = ROOT_PATH + \"/results/mendelian/\"+scenario_name+\"/\" os.makedirs(folder, exist_ok=True) means = [] times = []", "print(\"Test MSE of %s: %f\" % (model_type_name, test_mse)) else: means2 = [] times2", "np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in", "scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j)for i, j in [[0.5, 1],[2, 1]]] for sce", "rep) if os.path.exists(time_path): res = np.load(time_path) times2 += [res] else: print(time_path, ' not", "file_name = \"%s_%d.npz\" % (method_name, rep) save_path = os.path.join(folder, file_name) model, time =", "means2 = [] times2 
= [] for method_name, method in methods: # print(\"Running", "continue elif rep >repid: break else: pass for method_name, method in methods[mid:mid+1]: print(\"Running", "if rep < repid: continue elif rep >repid: break else: pass for method_name,", "methods = [] # baseline methods methods += [(\"DirectNN\", DirectNN())] methods += [(\"Vanilla2SLS\",", "j) for s in [8,16,32] for i,j in [[1,1]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i,", "scenarios += [\"mendelian_{}_{}_{}\".format(16, i, j) for i, j in [[1, 0.5],[1, 2]]] scenarios", "type(model).__name__ print(\"Test MSE of %s: %f\" % (model_type_name, test_mse)) else: means2 = []", "i, j) for i, j in [[1, 0.5],[1, 2]]] scenarios += [\"mendelian_{}_{}_{}\".format(16, i,", "save_path = os.path.join(folder, file_name) if os.path.exists(save_path): res = np.load(save_path) mse = float(((res['g_hat'] -", "methods[mid:mid+1]: print(\"Running \" + method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" %", "print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit] coordinates'.join(['{'+'\\n'.join(e)+'};\\n' for e in rows.T])) print('Tabulate Table:') #", "\"__main__\": scenarios = [\"mendelian_{}_{}_{}\".format(s, i, j) for s in [8,16,32] for i,j in", "import Poly2SLS, Vanilla2SLS, DirectNN, \\ GMM, DeepIV, AGMM import os import tensorflow from", "times += [times2] #print('means',np.mean(np.array(means),axis=0)) #print('std',np.std(np.array(means),axis=0)) return means,times if __name__ == \"__main__\": scenarios =", "= scenarios[i] means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows +=", "are applicable in all scenarios methods = [] # baseline methods methods +=", "** 2).mean()) # print('mse: {}'.format(mse)) means2 += [mse] else: print(save_path, ' not exists')", "of %s: %f\" % (model_type_name, test_mse)) else: means2 = [] times2 = []", "eval_model(model, test) model_type_name = type(model).__name__ print(\"Test MSE of %s: %f\" % 
(model_type_name, test_mse))", "+ method_name +\" \" + str(rep)) file_name = \"%s_%d.npz\" % (method_name, rep) save_path", "means,times = run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +-", "y=test.y, g_true=test.g, g_hat=g_pred) def run_experiment(scenario_name,mid,repid, num_reps=10, seed=527,training=False): # set random seed torch.manual_seed(seed) np.random.seed(seed)", "means = [] times = [] for rep in range(num_reps): # Not all", "for repid in range(10): run_experiment(sce, mid, repid, training=True) rows = [] for i", "run_experiment(s,0,0,training=False) mean = np.mean(means,axis=0) std = np.std(means,axis=0) rows += [[\"({},{:.4f}) +- ({:.3f},{:.3f})\".format(s,mean[j],std[j],std[j]) for", "train.y, train.z, None) np.save(folder+\"%s_%d_time.npy\" % (method_name, rep),time) save_model(model, save_path, test) test_mse = eval_model(model,", "[(\"GMM\", GMM(g_model=\"2-layer\", n_steps=20))] methods += [(\"AGMM\", AGMM())] methods += [(\"DeepIV\", DeepIV())] if training:", "= np.array([\"DirectNN\",\"Vanilla2SLS\",\"Poly2SLS\",\"GMM\",\"AGMM\",\"DeepIV\"])[:,None] rows = np.array(rows) #rows = np.vstack((methods,rows)) print('addplot+[mark=*,error bars/.cd, y dir=both,y explicit]", "if len(means2) == len(methods): means += [means2] if len(times2) == len(methods): times +=" ]
[ "db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast or other dissemination of the", "pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name =", "= db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code", "db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date =", "class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer,", "electioneering communication (date reported on page 1 of Form 9)') disbursement_date = db.Column('disb_dt',", "index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id',", "= db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True)", "link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30,", "is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast or", "db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String)", "db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id =", "9)') disbursement_date = 
db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual disbursements and execution", "= db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date", "date that triggers disclosure of the electioneering communication (date reported on page 1", "candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num',", "db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name", "import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id =", "contracts creating an obligation to make disbursements (SB date of disbursement)') disbursement_amount =", "db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type", "index=True) #new columns added from ware house transition action_code = db.Column('action_cd', db.String) action_code_full", "Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual disbursements and", "primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id',", "sqlalchemy.dialects.postgresql import TSVECTOR from .base import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id", "index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) 
sub_id = db.Column(db.Integer,", "= db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the date that triggers disclosure", "date includes actual disbursements and execution of contracts creating an obligation to make", "airing, broadcast, cablecast or other dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date,", "from sqlalchemy.dialects.postgresql import TSVECTOR from .base import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv'", "= db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model):", "that triggers disclosure of the electioneering communication (date reported on page 1 of", "broadcast, cablecast or other dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The", "CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True)", "house transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi',", "index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name =", "db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number", "db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True)", "mentions one candidate the full cost of the communication is listed.\") communication_date =", "db.String) primary_general_indicator = 
db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String)", "db.Date, doc='The pubic distribution date is the date that triggers disclosure of the", "= db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state =", "doc=\"If an electioneering cost targets several candidates, the total cost is divided by", "it only mentions one candidate the full cost of the communication is listed.\")", "= db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True)", "idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String)", "db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district =", "= db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url", "db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx", "= db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator", "electioneering cost targets several candidates, the total cost is divided by the number", "an obligation to make disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30,", "= db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id =", "image_number = db.Column('image_num', db.String, 
index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer,", "communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind',", "action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type", "db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator =", "db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String)", "= db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True)", "sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an", "communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the date that", "db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String)", "index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district',", "communication (date reported on page 1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date,", "doc='Disbursement date includes actual disbursements and execution of contracts creating an obligation to", "or other dissemination of the 
communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution", "cost is divided by the number of candidates. If it only mentions one", "total cost is divided by the number of candidates. If it only mentions", "= db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle =", "state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String,", "db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If", "distribution date is the date that triggers disclosure of the electioneering communication (date", "db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date =", "index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added from ware house transition", "= db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True)", "db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle',", "db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type", "= db.Column('s_o_ind', db.String, index=True) #new columns added from ware house transition action_code =", "only mentions one candidate the full cost of the communication is listed.\") communication_date", "db.Column('election_tp', db.String) pdf_url = 
db.Column(db.String) purpose_description_text = db.Column(TSVECTOR) @property def election_type(self): return self.election_type_raw[:1]", "index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date)", "index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name", "2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number =", "= db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual disbursements and execution of contracts", "= db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text = db.Column(TSVECTOR)", "committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name =", "#new columns added from ware house transition action_code = db.Column('action_cd', db.String) action_code_full =", "db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr',", "db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state", "index=True) sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering", "beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The", "of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual 
disbursements", "= db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date", "TSVECTOR from .base import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer,", "= db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name =", "db.String) communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class',", "= db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount", "db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district =", "db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num", "public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the date that triggers", "divided by the number of candidates. 
If it only mentions one candidate the", "support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added from ware house transition action_code", "= db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast or other dissemination of", "= db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String,", "db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id =", "actual disbursements and execution of contracts creating an obligation to make disbursements (SB", "index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name',", "candidates, the total cost is divided by the number of candidates. If it", "disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual disbursements and execution of", "db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer)", "db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url =", "= db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num',", "db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office", "db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text = db.Column(TSVECTOR) @property def", "= db.Column(db.String, index=True) 
sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id", "db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district", "record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share',", "page 1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes", "= db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets several candidates, the total", "db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True)", "= db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added from", "db.Numeric(30, 2), doc=\"If an electioneering cost targets several candidates, the total cost is", "db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose", "= db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True)", "db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id", "report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String,", "= db.Column('election_tp', db.String) 
pdf_url = db.Column(db.String) purpose_description_text = db.Column(TSVECTOR) @property def election_type(self): return", "action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description", "schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number =", "db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm',", "db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office =", "db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt',", "db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True)", "db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state =", "(date reported on page 1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True,", "db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code =", "db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String)", "communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast", 
"electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share =", "= db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String,", "index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type =", "db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class =", "from .base import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True)", "db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district',", "number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets", "db.Date, index=True, doc='Disbursement date includes actual disbursements and execution of contracts creating an", "db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind',", "db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full =", "= db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2),", "= db.Column('s_o_cand_office_st', db.String, 
index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True)", "index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id = db.Column(db.Integer)", "db.String, index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id =", "= db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full", "db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type =", "includes actual disbursements and execution of contracts creating an obligation to make disbursements", "db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost", "several candidates, the total cost is divided by the number of candidates. 
If", "db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number", "= db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True)", "of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year", "communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String,", "db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost',", "and execution of contracts creating an obligation to make disbursements (SB date of", "db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id", "db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__", "full cost of the communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is", "= db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year =", "obligation to make disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2),", "index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc',", 
"amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url", "candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name", "index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name =", "db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator", "class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String,", "original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id',", "primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle", "db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id =", "db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name", "committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm',", "each electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric) calculated_candidate_share", "db.Column('orig_sub_id', db.Integer, 
index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True)", "candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String)", "candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String,", "candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String,", "= db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets several", "doc='The pubic distribution date is the date that triggers disclosure of the electioneering", "db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text", "reported on page 1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement", "__tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name", "db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String)", "disbursements and execution of contracts creating an obligation to make disbursements (SB date", "db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office =", "= db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', 
db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp',", "db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full", "by the number of candidates. If it only mentions one candidate the full", "added from ware house transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String)", "candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String,", "db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id", "candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True)", "index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office',", "communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String,", "the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the date", "date is the date that triggers disclosure of the electioneering communication (date reported", "committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String,", "= db.Column(db.String) number_of_candidates = db.Column(db.Numeric) 
calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering", "candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String)", "disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description =", "db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True)", "file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw", "db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer,", "db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator =", "index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num =", "db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full", "db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type", "db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True)", "= db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = 
db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm',", "db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id =", "db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st',", "db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id',", "an electioneering cost targets several candidates, the total cost is divided by the", "= db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True)", "execution of contracts creating an obligation to make disbursements (SB date of disbursement)')", "db.Column(db.Numeric) calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets several candidates,", "db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True)", "the communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the airing, broadcast,", "db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual disbursements and execution of contracts creating", "to make disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True)", "index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc',", "one candidate the full 
cost of the communication is listed.\") communication_date = db.Column('comm_dt',", "db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount =", "cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String,", "disclosure of the electioneering communication (date reported on page 1 of Form 9)')", "= db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url =", "the airing, broadcast, cablecast or other dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt',", "db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office',", "doc=\"The identifier for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates", "=db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True)", "= db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id", "db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets several candidates, the total cost", "db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new", "2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True) 
communication_type_full =", "= db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier", "creating an obligation to make disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt',", "= db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw =", "identifier for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates =", "doc='It is the airing, broadcast, cablecast or other dissemination of the communication') public_distribution_date", "ware house transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator =", "candidate_office = db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st',", "report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True)", "db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String)", "primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year", "calculated_candidate_share = db.Column('calculated_cand_share', db.Numeric(30, 2), doc=\"If an electioneering cost targets several candidates, the", "= db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office", ".base import db class CommunicationCost(db.Model): __tablename__ = 
'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id", "db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt',", "db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text = db.Column(TSVECTOR) @property", "transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String)", "'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm',", "= db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String)", "date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String)", "transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp',", "of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the", "from ware house transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator", "candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True)", "on page 1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date", 
"db.Date, doc='It is the airing, broadcast, cablecast or other dissemination of the communication')", "candidates. If it only mentions one candidate the full cost of the communication", "2), doc=\"If an electioneering cost targets several candidates, the total cost is divided", "db.String) report_year = db.Column('rpt_yr', db.Integer) cycle = db.Column('election_cycle', db.Integer, index=True) form_type_code = db.Column('filing_form',", "the full cost of the communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It", "db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name", "targets several candidates, the total cost is divided by the number of candidates.", "transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type =", "dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is", "db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String)", "db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for", "db.Integer, index=True) form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full =", "transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String)", "sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id", "db.Column('action_cd', 
db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc',", "If it only mentions one candidate the full cost of the communication is", "db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added from ware house", "= db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id", "for each electioneering record\") link_id = db.Column(db.Integer) sb_link_id = db.Column(db.String) number_of_candidates = db.Column(db.Numeric)", "= db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True)", "index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number", "db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp',", "of contracts creating an obligation to make disbursements (SB date of disbursement)') disbursement_amount", "db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp',", "the total cost is divided by the number of candidates. If it only", "__tablename__ = 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id", "the number of candidates. 
If it only mentions one candidate the full cost", "index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number =", "candidate_id = db.Column('cand_id', db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String)", "receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text =", "= db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num',", "candidate the full cost of the communication is listed.\") communication_date = db.Column('comm_dt', db.Date,", "import TSVECTOR from .base import db class CommunicationCost(db.Model): __tablename__ = 'ofec_communication_cost_mv' sub_id =", "index=True, doc='Disbursement date includes actual disbursements and execution of contracts creating an obligation", "purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer)", "candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state", "make disbursements (SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description", "index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date,", "cost targets several candidates, the total cost is divided by the number of", "index=True) purpose_description = db.Column('disb_desc', db.String) report_year = 
db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num',", "primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String, index=True) committee_id =", "= db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True)", "of the communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the airing,", "index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number", "= db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr', db.Integer,", "db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose =", "db.Numeric(30, 2), index=True) transaction_type = db.Column('transaction_tp', db.String) communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full", "pubic distribution date is the date that triggers disclosure of the electioneering communication", "sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id', db.String,", "= db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm',", "other dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date", "disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year =", "cablecast or 
other dissemination of the communication') public_distribution_date = db.Column('pub_distrib_dt', db.Date, doc='The pubic", "columns added from ware house transition action_code = db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc',", "disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc', db.String) report_year = db.Column('rpt_yr',", "number of candidates. If it only mentions one candidate the full cost of", "triggers disclosure of the electioneering communication (date reported on page 1 of Form", "= db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String) candidate_office = db.Column('cand_office', db.String, index=True)", "(SB date of disbursement)') disbursement_amount = db.Column('reported_disb_amt', db.Numeric(30, 2), index=True) purpose_description = db.Column('disb_desc',", "index=True) candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30,", "db.String) candidate_last_name = db.Column('s_o_cand_l_nm', db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String)", "= db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String,", "is the airing, broadcast, cablecast or other dissemination of the communication') public_distribution_date =", "= db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True)", "= db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model): 
__tablename__ = 'ofec_electioneering_mv'", "db.Integer) amendment_indicator = db.Column('amndt_ind', db.String) receipt_date = db.Column('receipt_dt', db.Date) election_type_raw = db.Column('election_tp', db.String)", "db.String) candidate_office_district = db.Column('s_o_cand_office_district', db.String, index=True) candidate_office = db.Column('s_o_cand_office', db.String, index=True) candidate_office_full =db.Column('s_o_cand_office_desc',", "sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for each electioneering record\")", "is divided by the number of candidates. If it only mentions one candidate", "the electioneering communication (date reported on page 1 of Form 9)') disbursement_date =", "schedule_type_full = db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number =", "db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx =", "the date that triggers disclosure of the electioneering communication (date reported on page", "election_type_raw = db.Column('election_tp', db.String) pdf_url = db.Column(db.String) purpose_description_text = db.Column(TSVECTOR) @property def election_type(self):", "db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String) candidate_name = db.Column('s_o_cand_nm', db.String) candidate_last_name", "Electioneering(db.Model): __tablename__ = 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True)", "= db.Column('action_cd', db.String) action_code_full = db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description =", "cost of the communication is listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the", 
"tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class", "is the date that triggers disclosure of the electioneering communication (date reported on", "file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String, index=True) class Electioneering(db.Model): __tablename__ =", "of candidates. If it only mentions one candidate the full cost of the", "= 'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = db.Column('orig_sub_id', db.Integer, index=True) candidate_id =", "= db.Column('cand_office', db.String, index=True) candidate_district = db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String,", "report_year = db.Column('rpt_yr', db.Integer, index=True) file_number = db.Column('file_num', db.Integer) amendment_indicator = db.Column('amndt_ind', db.String)", "index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class = db.Column('communication_class', db.String, index=True) purpose = db.Column('communication_class_desc',", "= 'ofec_electioneering_mv' idx = db.Column(db.Integer, primary_key=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name =", "committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name = db.Column('cand_name', db.String)", "db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type = db.Column('rpt_tp', db.String) report_year = db.Column('rpt_yr', db.Integer)", "db.Column('pub_distrib_dt', db.Date, doc='The pubic distribution date is the date that triggers disclosure of", "db.String, index=True) sb_image_num = db.Column(db.String, index=True) sub_id = db.Column(db.Integer, doc=\"The identifier for each", "= db.Column('transaction_tp', db.String) 
communication_type = db.Column('communication_tp', db.String, index=True) communication_type_full = db.Column('communication_tp_desc', db.String) communication_class", "db.Column('schedule_type_desc', db.String) tran_id = db.Column(db.String) file_number = db.Column('file_num', db.Integer) image_number = db.Column('image_num', db.String,", "= db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc', db.String) candidate_office_district", "candidate_office_full =db.Column('s_o_cand_office_desc', db.String) transaction_date = db.Column('communication_dt', db.Date, index=True) transaction_amount = db.Column('communication_cost', db.Numeric(30, 2),", "form_type_code = db.Column('filing_form', db.String, index=True) schedule_type = db.Column(db.String, index=True) schedule_type_full = db.Column('schedule_type_desc', db.String)", "= db.Column('action_cd_desc', db.String) primary_general_indicator = db.Column('s_o_rpt_pgi', db.String) primary_general_indicator_description = db.Column('s_o_rpt_pgi_desc', db.String) report_type =", "db.String, index=True) committee_id = db.Column('cmte_id', db.String, index=True) committee_name = db.Column(db.String) pdf_url = db.Column(db.String)", "candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String, index=True) sb_image_num = db.Column(db.String,", "1 of Form 9)') disbursement_date = db.Column('disb_dt', db.Date, index=True, doc='Disbursement date includes actual", "db.String, index=True) committee_name = db.Column('cmte_nm', db.String) candidate_id = db.Column('cand_id', db.String, index=True) candidate_name =", "db.String, index=True) #new columns added from ware house transition action_code = db.Column('action_cd', db.String)", "'ofec_communication_cost_mv' sub_id = db.Column(db.Integer, primary_key=True) original_sub_id = 
db.Column('orig_sub_id', db.Integer, index=True) candidate_id = db.Column('cand_id',", "db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String, index=True) state_full = db.Column('s_o_cand_office_st_desc',", "index=True) purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns", "of the electioneering communication (date reported on page 1 of Form 9)') disbursement_date", "purpose = db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added", "db.String) candidate_middle_name = db.Column('s_o_cand_m_nm', db.String) candidate_first_name = db.Column('s_o_cand_f_nm', db.String) candidate_office_state = db.Column('s_o_cand_office_st', db.String,", "listed.\") communication_date = db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast or other", "= db.Column('cand_office_district', db.String, index=True) candidate_state = db.Column('cand_office_st', db.String, index=True) beginning_image_number = db.Column('f9_begin_image_num', db.String,", "communication_date = db.Column('comm_dt', db.Date, doc='It is the airing, broadcast, cablecast or other dissemination", "db.Column('communication_class_desc', db.String, index=True) support_oppose_indicator = db.Column('s_o_ind', db.String, index=True) #new columns added from ware", "db.Column('s_o_ind', db.String, index=True) #new columns added from ware house transition action_code = db.Column('action_cd'," ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "from .nanmin import nanmin, TensorNanMin from .all import all, TensorAll from .any import", "from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin,", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", ".var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from .nanvar", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "TensorMomentMap, TensorMomentCombine from .std import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\", "License. # You may obtain a copy of the License at # #", ".sum import sum, TensorSum from .nansum import nansum, TensorNanSum from .prod import prod,", "TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax,", ".nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap,", "Holding Ltd. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import", "max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean)", "permissions and # limitations under the License. 
from .sum import sum, TensorSum from", "setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor,", "TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax,", "law or agreed to in writing, software # distributed under the License is", "nanmin, TensorNanMin from .all import all, TensorAll from .any import any, TensorAny from", "# Copyright 1999-2018 Alibaba Group Holding Ltd. # # Licensed under the Apache", "the License for the specific language governing permissions and # limitations under the", "compliance with the License. # You may obtain a copy of the License", "prod, TensorProd from .nanprod import nanprod, TensorNanProd from .max import max, TensorMax from", "argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor, 'std', std)", "coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd. # # Licensed", "import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose from", "from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal import array_equal", "language governing permissions and # limitations under the License. 
from .sum import sum,", "TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax,", "mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from", "TensorCumsum from .cumprod import cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap,", "this file except in compliance with the License. # You may obtain a", "from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd", "import sum, TensorSum from .nansum import nansum, TensorNanSum from .prod import prod, TensorProd", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "import all, TensorAll from .any import any, TensorAny from .mean import mean, TensorMean,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "allclose from .array_equal import array_equal def _install(): from ..core import Tensor setattr(Tensor, 'sum',", "prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any)", "cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment,", "argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from", "utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd. 
# # Licensed under", "import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from", "nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax", "from .std import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine", "ANY KIND, either express or implied. # See the License for the specific", "from .nanprod import nanprod, TensorNanProd from .max import max, TensorMax from .nanmax import", "from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum", "setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor,", ".std import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from", ".nansum import nansum, TensorNanSum from .prod import prod, TensorProd from .nanprod import nanprod,", "import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from .var import var, TensorVar,", "from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap,", "TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose", "TensorAll from .any import any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine", "from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import", "in compliance with the License. 
# You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from .var", "import nansum, TensorNanSum from .prod import prod, TensorProd from .nanprod import nanprod, TensorNanProd", "import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from .nanvar import", "TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal import", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum)", "TensorNanProd from .max import max, TensorMax from .nanmax import nanmax, TensorNanMax from .min", "use this file except in compliance with the License. # You may obtain", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "not use this file except in compliance with the License. # You may", "TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "Alibaba Group Holding Ltd. # # Licensed under the Apache License, Version 2.0", ".array_equal import array_equal def _install(): from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor,", "-*- # Copyright 1999-2018 Alibaba Group Holding Ltd. 
# # Licensed under the", "See the License for the specific language governing permissions and # limitations under", ".nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from", ".nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from", "'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all',", "License, Version 2.0 (the \"License\"); # you may not use this file except", "sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all)", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "import nanprod, TensorNanProd from .max import max, TensorMax from .nanmax import nanmax, TensorNanMax", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "\\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum from", "-*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd. 
# #", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine", "TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin", "from .allclose import allclose from .array_equal import array_equal def _install(): from ..core import", "import array_equal def _install(): from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod',", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "import max, TensorMax from .nanmax import nanmax, TensorNanMax from .min import min, TensorMin", "import cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std", "from .max import max, TensorMax from .nanmax import nanmax, TensorNanMax from .min import", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any',", "'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum',", "OF ANY KIND, either express or implied. 
# See the License for the", "TensorNanMin from .all import all, TensorAll from .any import any, TensorAny from .mean", "var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from .nanvar import nanvar,", "import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine", "nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import", "2.0 (the \"License\"); # you may not use this file except in compliance", "import allclose from .array_equal import array_equal def _install(): from ..core import Tensor setattr(Tensor,", "from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk,", "TensorCountNonzero from .allclose import allclose from .array_equal import array_equal def _install(): from ..core", "# you may not use this file except in compliance with the License.", "TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment,", "Copyright 1999-2018 Alibaba Group Holding Ltd. 
# # Licensed under the Apache License,", "all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin)", "'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor, 'std',", "setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor, 'std', std) _install()", "agreed to in writing, software # distributed under the License is distributed on", "from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\", "TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import", "TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "# limitations under the License. from .sum import sum, TensorSum from .nansum import", "\\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import", "(the \"License\"); # you may not use this file except in compliance with", "TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std", "any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean,", "limitations under the License. 
from .sum import sum, TensorSum from .nansum import nansum,", "import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod", "# # Unless required by applicable law or agreed to in writing, software", "from .prod import prod, TensorProd from .nanprod import nanprod, TensorNanProd from .max import", "nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from", "nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod import", "express or implied. # See the License for the specific language governing permissions", ".allclose import allclose from .array_equal import array_equal def _install(): from ..core import Tensor", ".max import max, TensorMax from .nanmax import nanmax, TensorNanMax from .min import min,", "import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "for the specific language governing permissions and # limitations under the License. from", "except in compliance with the License. 
# You may obtain a copy of", "argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var)", "by applicable law or agreed to in writing, software # distributed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", ".cumprod import cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from", "setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor,", "from .nansum import nansum, TensorNanSum from .prod import prod, TensorProd from .nanprod import", "sum, TensorSum from .nansum import nansum, TensorNanSum from .prod import prod, TensorProd from", "from .nanmax import nanmax, TensorNanMax from .min import min, TensorMin from .nanmin import", "TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum", "either express or implied. # See the License for the specific language governing", "TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin", "from .array_equal import array_equal def _install(): from ..core import Tensor setattr(Tensor, 'sum', sum)", "setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor,", "setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "specific language governing permissions and # limitations under the License. 
from .sum import", "TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import argmin,", "'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean',", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin',", "from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\", "from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from", "import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd", "License. 
from .sum import sum, TensorSum from .nansum import nansum, TensorNanSum from .prod", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", ".min import min, TensorMin from .nanmin import nanmin, TensorNanMin from .all import all,", "Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min)", "setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor,", "TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import", "cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import", "all, TensorAll from .any import any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk,", ".nanmax import nanmax, TensorNanMax from .min import min, TensorMin from .nanmin import nanmin,", "file except in compliance with the License. 
# You may obtain a copy", ".all import all, TensorAll from .any import any, TensorAny from .mean import mean,", ".nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod", "TensorMin from .nanmin import nanmin, TensorNanMin from .all import all, TensorAll from .any", "max, TensorMax from .nanmax import nanmax, TensorNanMax from .min import min, TensorMin from", "from .cumsum import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from .var import", "TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod", "from .nanstd import nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod,", "the License. from .sum import sum, TensorSum from .nansum import nansum, TensorNanSum from", ".mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine", "_install(): from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max',", "..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor,", "\\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod", "'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor, 'std', std) _install() del", "array_equal def _install(): from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod)", "TensorNanArgmaxCombine from .argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin,", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) 
setattr(Tensor, 'argmax', argmax)", "setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor,", "License for the specific language governing permissions and # limitations under the License.", "from .min import min, TensorMin from .nanmin import nanmin, TensorNanMin from .all import", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal", "'mean', mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod',", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding", "the License. # You may obtain a copy of the License at #", ".argmin import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap,", "nansum, TensorNanSum from .prod import prod, TensorProd from .nanprod import nanprod, TensorNanProd from", "std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import", ".nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from", "nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from", ".argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap,", "to in writing, software # distributed under the License is distributed on an", "and # limitations under the License. 
from .sum import sum, TensorSum from .nansum", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor,", "TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum", "Group Holding Ltd. # # Licensed under the Apache License, Version 2.0 (the", "import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min',", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "TensorNanSum from .prod import prod, TensorProd from .nanprod import nanprod, TensorNanProd from .max", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", ".prod import prod, TensorProd from .nanprod import nanprod, TensorNanProd from .max import max,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "import min, TensorMin from .nanmin import nanmin, TensorNanMin from .all import all, TensorAll", "required by applicable law or agreed to in writing, software # distributed under", "from .any import any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from", "'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var',", "# -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd. 
#", "python # -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd.", "from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import", "from .all import all, TensorAll from .any import any, TensorAny from .mean import", "nanmax, TensorNanMax from .min import min, TensorMin from .nanmin import nanmin, TensorNanMin from", "applicable law or agreed to in writing, software # distributed under the License", ".nanmin import nanmin, TensorNanMin from .all import all, TensorAll from .any import any,", "TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod import cumprod,", ".cumsum import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from .var import var,", "import nanstd from .nancumsum import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from", "the specific language governing permissions and # limitations under the License. from .sum", "def _install(): from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor,", "<reponame>sighingnow/mars<filename>mars/tensor/reduction/__init__.py #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group", "nanprod, TensorNanProd from .max import max, TensorMax from .nanmax import nanmax, TensorNanMax from", "TensorNanMax from .min import min, TensorMin from .nanmin import nanmin, TensorNanMin from .all", "from ..core import Tensor setattr(Tensor, 'sum', sum) setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max)", "import prod, TensorProd from .nanprod import nanprod, TensorNanProd from .max import max, TensorMax", "import any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean import", "Ltd. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "import count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal import array_equal def _install():", "under the License. from .sum import sum, TensorSum from .nansum import nansum, TensorNanSum", "cumsum) setattr(Tensor, 'cumprod', cumprod) setattr(Tensor, 'var', var) setattr(Tensor, 'std', std) _install() del _install", "or agreed to in writing, software # distributed under the License is distributed", "TensorSum from .nansum import nansum, TensorNanSum from .prod import prod, TensorProd from .nanprod", "TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import nanargmax, TensorNanArgmax, \\ TensorNanArgmaxMap, TensorNanArgmaxCombine from .argmin import", "TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum,", ".nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose", ".count_nonzero import count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal import array_equal def", "import argmin, TensorArgmin, TensorArgminMap, TensorArgminCombine from .nanargmin import nanargmin, TensorNanArgmin, \\ TensorNanArgminMap, TensorNanArgminCombine", "or implied. 
# See the License for the specific language governing permissions and", "TensorProd from .nanprod import nanprod, TensorNanProd from .max import max, TensorMax from .nanmax", "min, TensorMin from .nanmin import nanmin, TensorNanMin from .all import all, TensorAll from", "TensorMomentCombine from .std import std from .nanvar import nanvar, TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap,", "setattr(Tensor, 'prod', prod) setattr(Tensor, 'max', max) setattr(Tensor, 'min', min) setattr(Tensor, 'all', all) setattr(Tensor,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "governing permissions and # limitations under the License. from .sum import sum, TensorSum", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "import nanmin, TensorNanMin from .all import all, TensorAll from .any import any, TensorAny", "TensorNanVar, TensorNanMoment, \\ TensorNanMomentMap, TensorNanMomentCombine from .nanstd import nanstd from .nancumsum import nancumsum,", "count_nonzero, TensorCountNonzero from .allclose import allclose from .array_equal import array_equal def _install(): from", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "import nancumsum, TensorNanCumsum from .nancumprod import nancumprod, TensorNanCumprod from .count_nonzero import count_nonzero, TensorCountNonzero", "TensorMax from .nanmax import nanmax, TensorNanMax from .min import min, TensorMin from .nanmin", "mean) setattr(Tensor, 'argmax', argmax) setattr(Tensor, 'argmin', argmin) setattr(Tensor, 'cumsum', cumsum) setattr(Tensor, 'cumprod', cumprod)", "with the License. 
# You may obtain a copy of the License at", "from .cumprod import cumprod, TensorCumprod from .var import var, TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine", "TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine from .nanargmax import", ".nanprod import nanprod, TensorNanProd from .max import max, TensorMax from .nanmax import nanmax,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "import nanmax, TensorNanMax from .min import min, TensorMin from .nanmin import nanmin, TensorNanMin", "from .sum import sum, TensorSum from .nansum import nansum, TensorNanSum from .prod import", ".any import any, TensorAny from .mean import mean, TensorMean, TensorMeanChunk, TensorMeanCombine from .nanmean", "in writing, software # distributed under the License is distributed on an \"AS", "TensorMeanCombine from .nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax,", "1999-2018 Alibaba Group Holding Ltd. # # Licensed under the Apache License, Version", ".nanmean import nanmean, TensorNanMean, TensorNanMeanChunk, TensorMeanCombine from .argmax import argmax, TensorArgmax, TensorArgmaxMap, TensorArgmaxCombine", "TensorNanArgminMap, TensorNanArgminCombine from .cumsum import cumsum, TensorCumsum from .cumprod import cumprod, TensorCumprod from", "'min', min) setattr(Tensor, 'all', all) setattr(Tensor, 'any', any) setattr(Tensor, 'mean', mean) setattr(Tensor, 'argmax',", "TensorVar, TensorMoment, TensorMomentMap, TensorMomentCombine from .std import std from .nanvar import nanvar, TensorNanVar,", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
[ "coding: utf-8 -*- import discord from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION", "by %s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested", "else: if cmd.action == \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with an", "command next time.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower()", "None self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)), message) else: self.game =", "import discord from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION = \"Change the", "for this command next time.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\")", "\"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with an argument for this command", "{\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status", "have the right to do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s,", "elif cmd.action == \"game\": if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's", "= \"Change the bot status and his played game on discord\" USAGE =", "next time.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in", "discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game = None cdb.reserve_keywords([\"status\", \"game\"],", "self.game = None self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)), message) else:", "cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if not cmd.triggered 
\\ or cmd.action", "if len(cmd.args) == 0: await message.channel.send(\"Try with an argument for this command next", "message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" % (str(cmd.author)),", "NAME) async def on_message(self, message, cmd): if not cmd.triggered \\ or cmd.action not", "don't have the right to do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP", "= None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async", "by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not", "self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\":", "cmd): if not cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]: return if", "elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested by %s\" %", "not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to do that.\") self.cdb.log_warn(\"Changing bot", "if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to do that.\") self.cdb.log_warn(\"Changing", "\"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game = None cdb.reserve_keywords([\"status\",", "\"game\": if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's game requested by", "online, offline, idle, dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args) == 0:", "USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb): 
super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online,", "discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game", "def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\":", "a valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.action", "not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the", "discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" % (str(cmd.author)), message) await self.cdb.change_presence(game=self.game, status=self.status)", "message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args)", "= None self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)), message) else: self.game", "cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if", "%s, FAILED\" % (str(cmd.author)), message) else: if cmd.action == \"status\": if len(cmd.args) ==", "%s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await", "bot status requested by NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if cmd.action", "requested by %s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- import discord from classes.Plugin import Plugin", "argument for this command next time.\") await 
message.channel.send(\"Valid arguments: online, offline, idle, dnd,", "on_message(self, message, cmd): if not cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]:", "== \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with an argument for this", "self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid arguments:", "async def on_message(self, message, cmd): if not cmd.triggered \\ or cmd.action not in", "discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game = None", "self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self,", "do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\" % (str(cmd.author)), message)", "[\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to", "to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else:", "await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.action == \"game\": if", "on discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict =", "cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if not cmd.triggered \\", "%s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a", "bot's game requested by %s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change", 
"message.channel.send(\"Try with an argument for this command next time.\") await message.channel.send(\"Valid arguments: online,", "right to do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\" %", "message.channel.send(\"You don't have the right to do that.\") self.cdb.log_warn(\"Changing bot status requested by", "not cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author):", "message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\")", "discord.Status.invisible} self.status = None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE,", "or cmd.action not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't", "= {} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\":", "status requested by NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if cmd.action ==", "arguments: online, offline, idle, dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args) ==", "cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await", "{} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline,", "and his played game on discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self,", "cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\":", "if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's game requested by %s\"", 
"if not cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]: return if not", "game on discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict", "\"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game =", "await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd,", "argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.action == \"game\":", "self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message)", "NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if not cmd.triggered \\ or", "NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if cmd.action == \"status\": if len(cmd.args)", "await message.channel.send(\"You don't have the right to do that.\") self.cdb.log_warn(\"Changing bot status requested", "game requested by %s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's", "to do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\" % (str(cmd.author)),", "cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested by %s\" % (cmd.args[0].lower(),", "invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested by %s\"", "with an argument for this command next time.\") await message.channel.send(\"Valid arguments: online, offline,", "0: self.game = None self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)), message)", "cmd.action == \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with an argument for", 
"discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\":", "message) else: if cmd.action == \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with", "played game on discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb)", "discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status =", "StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle,", "python3 # -*- coding: utf-8 -*- import discord from classes.Plugin import Plugin NAME", "(str(cmd.author)), message) else: if cmd.action == \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try", "in self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)),", "bot's status to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status =", "FAILED\" % (str(cmd.author)), message) else: if cmd.action == \"status\": if len(cmd.args) == 0:", "DESCRIPTION = \"Change the bot status and his played game on discord\" USAGE", "def on_message(self, message, cmd): if not cmd.triggered \\ or cmd.action not in [\"status\",", "requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's", "if cmd.action == \"status\": if len(cmd.args) == 0: await message.channel.send(\"Try with an argument", "== 0: await message.channel.send(\"Try with an argument for this command next time.\") await", "len(cmd.args) == 0: await message.channel.send(\"Try with an argument for this command next time.\")", 
"self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to do that.\") self.cdb.log_warn(\"Changing bot status", "offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to %s", "requested by NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if cmd.action == \"status\":", "# -*- coding: utf-8 -*- import discord from classes.Plugin import Plugin NAME =", "message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's", "None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def", "message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid", "\"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if not", "the bot status and his played game on discord\" USAGE = {} class", "cmd.action == \"game\": if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's game", "= None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message,", "\"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to do", "the right to do that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\"", "not a valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif", "bot status and his played game on discord\" USAGE = {} class StatusPlugin(Plugin):", "discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None 
self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION,", "\"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd): if not cmd.triggered", "__init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb,", "\"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None", "idle, dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args) == 0: self.game =", "0: await message.channel.send(\"Try with an argument for this command next time.\") await message.channel.send(\"Valid", "class StatusPlugin(Plugin): def __init__(self, cdb): super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\":", "this command next time.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif", "discord from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION = \"Change the bot", "an argument for this command next time.\") await message.channel.send(\"Valid arguments: online, offline, idle,", "% (str(cmd.author)), message) else: if cmd.action == \"status\": if len(cmd.args) == 0: await", "\"Status\" DESCRIPTION = \"Change the bot status and his played game on discord\"", "else: await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle,", "% (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid", "classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION = \"Change the bot status and", 
"message, cmd): if not cmd.triggered \\ or cmd.action not in [\"status\", \"game\"]: return", "self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:])", "import Plugin NAME = \"Status\" DESCRIPTION = \"Change the bot status and his", "cmd.action not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have", "-*- import discord from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION = \"Change", "self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid arguments: online, offline,", "in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right", "== 0: self.game = None self.cdb.log_info(\"Erasing bot's game requested by %s\" % (str(cmd.author)),", "status and his played game on discord\" USAGE = {} class StatusPlugin(Plugin): def", "super().__init__(cdb) self.status_dict = {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb,", "valid argument.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.action ==", "utf-8 -*- import discord from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION =", "len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's game requested by %s\" %", "% (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\"", "= {\"online\": discord.Status.online, \"offline\": discord.Status.offline, \"idle\": discord.Status.idle, \"dnd\": discord.Status.do_not_disturb, \"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible}", 
"else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" % (str(cmd.author)), message)", "= \"Status\" DESCRIPTION = \"Change the bot status and his played game on", "%s\" % (str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by", "arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status", "NAME = \"Status\" DESCRIPTION = \"Change the bot status and his played game", "(str(cmd.author)), message) else: self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" %", "status to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()]", "\\ or cmd.action not in [\"status\", \"game\"]: return if not self.cdb.isop_user(message.author): await message.channel.send(\"You", "online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to", "invisible.\") elif cmd.action == \"game\": if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing", "\"Change the bot status and his played game on discord\" USAGE = {}", "== \"game\": if len(cmd.args) == 0: self.game = None self.cdb.log_info(\"Erasing bot's game requested", "Plugin NAME = \"Status\" DESCRIPTION = \"Change the bot status and his played", "\"invisible\": discord.Status.invisible} self.status = None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME)", "await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change", "= self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\") await message.channel.send(\"Valid 
arguments: online,", "= discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" % (str(cmd.author)), message) await self.cdb.change_presence(game=self.game,", "that.\") self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\" % (str(cmd.author)), message) else:", "None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME) async def on_message(self, message, cmd):", "dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args) == 0: self.game = None", "offline, idle, dnd, invisible.\") elif cmd.action == \"game\": if len(cmd.args) == 0: self.game", "time.\") await message.channel.send(\"Valid arguments: online, offline, idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict:", "self.status = None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\") cdb.add_plugin_description(DESCRIPTION, NAME) cdb.add_plugin_usage(USAGE, NAME)", "his played game on discord\" USAGE = {} class StatusPlugin(Plugin): def __init__(self, cdb):", "-*- coding: utf-8 -*- import discord from classes.Plugin import Plugin NAME = \"Status\"", "return if not self.cdb.isop_user(message.author): await message.channel.send(\"You don't have the right to do that.\")", "\"do_not_disturb\": discord.Status.do_not_disturb, \"invisible\": discord.Status.invisible} self.status = None self.game = None cdb.reserve_keywords([\"status\", \"game\"], \"Status\")", "self.cdb.log_info(\"Change bot's status to %s requested by %s\" % (cmd.args[0].lower(), str(cmd.author)), message) self.status", "(cmd.args[0].lower(), str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\")", "by NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if cmd.action == \"status\": if", "dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: 
self.cdb.log_info(\"Change bot's status to %s requested by", "self.game = discord.Game(name=message.content[6:]) self.cdb.log_info(\"Change bot's game requested by %s\" % (str(cmd.author)), message) await", "await message.channel.send(\"Try with an argument for this command next time.\") await message.channel.send(\"Valid arguments:", "from classes.Plugin import Plugin NAME = \"Status\" DESCRIPTION = \"Change the bot status", "idle, dnd, invisible.\") elif cmd.args[0].lower() in self.status_dict: self.cdb.log_info(\"Change bot's status to %s requested", "self.cdb.log_warn(\"Changing bot status requested by NON-OP %s, FAILED\" % (str(cmd.author)), message) else: if", "str(cmd.author)), message) self.status = self.status_dict[cmd.args[0].lower()] else: await message.channel.send(\"It's not a valid argument.\") await" ]
[ "regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from", "Changes may cause incorrect behavior and will be lost if the code is", "MetricCollection from ._models_py3 import MetricSettings from ._models_py3 import MetricValue from ._models_py3 import Resource", "Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect", "if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from", "except (SyntaxError, ImportError): from ._models import ErrorResponse # type: ignore from ._models import", "._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import LogSettings from ._models_py3", "RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from", "Metric # type: ignore from ._models import MetricCollection # type: ignore from ._models", "the project root for license information. # Code generated by Microsoft (R) AutoRest", "from ._models_py3 import Metric from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings from", "from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings from ._models_py3 import MetricValue from", "License. See License.txt in the project root for license information. 
# Code generated", "type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import (", "._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import ( Unit, ) __all__", "'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource', 'RetentionPolicy', 'ServiceDiagnosticSettingsResource', 'ServiceDiagnosticSettingsResourcePatch', 'Unit', ]", "MIT License. See License.txt in the project root for license information. # Code", "._models import LocalizableString # type: ignore from ._models import LogSettings # type: ignore", "type: ignore from ._models import RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource", "import ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from", "# Licensed under the MIT License. See License.txt in the project root for", "LocalizableString from ._models_py3 import LogSettings from ._models_py3 import Metric from ._models_py3 import MetricCollection", "import Metric # type: ignore from ._models import MetricCollection # type: ignore from", "License.txt in the project root for license information. # Code generated by Microsoft", "ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse # type: ignore from ._models", "import LogSettings # type: ignore from ._models import Metric # type: ignore from", "._models import Resource # type: ignore from ._models import RetentionPolicy # type: ignore", "import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import LogSettings from ._models_py3 import", "._models import ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore", "under the MIT License. 
See License.txt in the project root for license information.", "reserved. # Licensed under the MIT License. See License.txt in the project root", "# type: ignore from ._models import MetricValue # type: ignore from ._models import", "._models import MetricSettings # type: ignore from ._models import MetricValue # type: ignore", "ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import ( Unit, ) __all__ = [", "type: ignore from ._monitor_client_enums import ( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString',", "# type: ignore from ._models import RetentionPolicy # type: ignore from ._models import", "import MetricSettings from ._models_py3 import MetricValue from ._models_py3 import Resource from ._models_py3 import", "# type: ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore from ._models import", "from ._monitor_client_enums import ( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric',", "MetricValue from ._models_py3 import Resource from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource", "from ._models import ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type:", "AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost", "Resource from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch", "._models import RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore", "type: ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch", "# type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import", "cause incorrect behavior and will be lost if the code is regenerated. #", "(R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be", "ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse #", "and will be lost if the code is regenerated. # -------------------------------------------------------------------------- try: from", "code is regenerated. 
# -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3 import", "from ._models_py3 import LocalizableString from ._models_py3 import LogSettings from ._models_py3 import Metric from", "Metric from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings from ._models_py3 import MetricValue", "from ._models import LocalizableString # type: ignore from ._models import LogSettings # type:", "ignore from ._models import RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource #", "from ._models import RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource # type:", "import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse # type: ignore from", "# type: ignore from ._monitor_client_enums import ( Unit, ) __all__ = [ 'ErrorResponse',", "from ._models import LogSettings # type: ignore from ._models import Metric # type:", "Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource',", "from ._models import MetricCollection # type: ignore from ._models import MetricSettings # type:", "by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and", "[ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource', 'RetentionPolicy', 'ServiceDiagnosticSettingsResource', 'ServiceDiagnosticSettingsResourcePatch', 'Unit',", "import MetricValue # type: ignore from ._models import Resource # type: ignore from", "-------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import", "ErrorResponse # type: ignore from ._models import LocalizableString # type: ignore from ._models", "(SyntaxError, ImportError): from ._models import ErrorResponse # type: ignore from ._models import LocalizableString", "import MetricValue from ._models_py3 import Resource from ._models_py3 import RetentionPolicy from ._models_py3 import", "Generator. # Changes may cause incorrect behavior and will be lost if the", "._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError,", "ignore from ._monitor_client_enums import ( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings',", "MetricSettings from ._models_py3 import MetricValue from ._models_py3 import Resource from ._models_py3 import RetentionPolicy", "type: ignore from ._models import Resource # type: ignore from ._models import RetentionPolicy", "ignore from ._models import Metric # type: ignore from ._models import MetricCollection #", "= [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource', 'RetentionPolicy', 'ServiceDiagnosticSettingsResource', 'ServiceDiagnosticSettingsResourcePatch',", "MetricValue # type: ignore from ._models import Resource # type: ignore from ._models", "import MetricCollection from ._models_py3 import MetricSettings from 
._models_py3 import MetricValue from ._models_py3 import", "from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse # type:", "._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import", "Licensed under the MIT License. See License.txt in the project root for license", "LogSettings # type: ignore from ._models import Metric # type: ignore from ._models", "type: ignore from ._models import MetricCollection # type: ignore from ._models import MetricSettings", "type: ignore from ._models import Metric # type: ignore from ._models import MetricCollection", "import RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore from", "import ErrorResponse # type: ignore from ._models import LocalizableString # type: ignore from", "See License.txt in the project root for license information. # Code generated by", "from ._models_py3 import MetricValue from ._models_py3 import Resource from ._models_py3 import RetentionPolicy from", "lost if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse", "from ._models import ErrorResponse # type: ignore from ._models import LocalizableString # type:", "license information. # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes", "type: ignore from ._models import LogSettings # type: ignore from ._models import Metric", "# type: ignore from ._models import MetricSettings # type: ignore from ._models import", "( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue',", "type: ignore from ._models import MetricSettings # type: ignore from ._models import MetricValue", ") __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource', 'RetentionPolicy',", "Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in", "from ._models_py3 import Resource from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from", "incorrect behavior and will be lost if the code is regenerated. # --------------------------------------------------------------------------", "._models import MetricValue # type: ignore from ._models import Resource # type: ignore", "._models_py3 import Metric from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings from ._models_py3", "ignore from ._models import LogSettings # type: ignore from ._models import Metric #", "coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed", "._models import ErrorResponse # type: ignore from ._models import LocalizableString # type: ignore", "import Resource # type: ignore from ._models import RetentionPolicy # type: ignore from", "ignore from ._models import Resource # type: ignore from ._models import RetentionPolicy #", "LocalizableString # type: ignore from ._models import LogSettings # type: ignore from ._models", "._models_py3 import MetricValue from ._models_py3 import Resource from ._models_py3 import RetentionPolicy from ._models_py3", "from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except", "import LocalizableString # type: ignore from ._models import LogSettings # type: ignore from", "ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import ( Unit,", "type: ignore from ._models import LocalizableString # type: ignore from ._models import LogSettings", "LogSettings from ._models_py3 import Metric from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings", "rights reserved. # Licensed under the MIT License. 
See License.txt in the project", "from ._models_py3 import LogSettings from ._models_py3 import Metric from ._models_py3 import MetricCollection from", "# type: ignore from ._models import Resource # type: ignore from ._models import", "._models import MetricCollection # type: ignore from ._models import MetricSettings # type: ignore", "import MetricCollection # type: ignore from ._models import MetricSettings # type: ignore from", "may cause incorrect behavior and will be lost if the code is regenerated.", "# -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3", "import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse", "MetricCollection # type: ignore from ._models import MetricSettings # type: ignore from ._models", "the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3", "ignore from ._models import MetricSettings # type: ignore from ._models import MetricValue #", "will be lost if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3", "type: ignore from ._models import MetricValue # type: ignore from ._models import Resource", "# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause", "ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums", "information. # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may", "from ._models import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import ( Unit, )", "ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import LogSettings from ._models_py3 import Metric", "._models import LogSettings # type: ignore from ._models import Metric # type: ignore", "be lost if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import", "ignore from ._models import MetricValue # type: ignore from ._models import Resource #", "ignore from ._models import MetricCollection # type: ignore from ._models import MetricSettings #", "import Resource from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import", "All rights reserved. # Licensed under the MIT License. See License.txt in the", "import LogSettings from ._models_py3 import Metric from ._models_py3 import MetricCollection from ._models_py3 import", "(c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt", "-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the", "ImportError): from ._models import ErrorResponse # type: ignore from ._models import LocalizableString #", "import Metric from ._models_py3 import MetricCollection from ._models_py3 import MetricSettings from ._models_py3 import", "Code Generator. # Changes may cause incorrect behavior and will be lost if", "ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore from ._models import ServiceDiagnosticSettingsResourcePatch #", "for license information. 
# Code generated by Microsoft (R) AutoRest Code Generator. #", "behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- try:", "# type: ignore from ._models import MetricCollection # type: ignore from ._models import", "RetentionPolicy # type: ignore from ._models import ServiceDiagnosticSettingsResource # type: ignore from ._models", "import ( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings',", "._models_py3 import LocalizableString from ._models_py3 import LogSettings from ._models_py3 import Metric from ._models_py3", "._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models import ErrorResponse # type: ignore", "._models import Metric # type: ignore from ._models import MetricCollection # type: ignore", "generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior", "._monitor_client_enums import ( Unit, ) __all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection',", "is regenerated. 
# -------------------------------------------------------------------------- try: from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString", "import ServiceDiagnosticSettingsResourcePatch # type: ignore from ._monitor_client_enums import ( Unit, ) __all__ =", "from ._models import Metric # type: ignore from ._models import MetricCollection # type:", "from ._models_py3 import MetricSettings from ._models_py3 import MetricValue from ._models_py3 import Resource from", "from ._models import MetricValue # type: ignore from ._models import Resource # type:", "._models_py3 import LogSettings from ._models_py3 import Metric from ._models_py3 import MetricCollection from ._models_py3", "from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import LogSettings from", "._models_py3 import Resource from ._models_py3 import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3", "import MetricSettings # type: ignore from ._models import MetricValue # type: ignore from", "MetricSettings # type: ignore from ._models import MetricValue # type: ignore from ._models", "try: from ._models_py3 import ErrorResponse from ._models_py3 import LocalizableString from ._models_py3 import LogSettings", "project root for license information. 
# Code generated by Microsoft (R) AutoRest Code", "import RetentionPolicy from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError):", "ignore from ._models import LocalizableString # type: ignore from ._models import LogSettings #", "._models_py3 import MetricSettings from ._models_py3 import MetricValue from ._models_py3 import Resource from ._models_py3", "__all__ = [ 'ErrorResponse', 'LocalizableString', 'LogSettings', 'Metric', 'MetricCollection', 'MetricSettings', 'MetricValue', 'Resource', 'RetentionPolicy', 'ServiceDiagnosticSettingsResource',", "in the project root for license information. # Code generated by Microsoft (R)", "<reponame>vbarbaresi/azure-sdk-for-python<gh_stars>1-10 # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", "Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will", "from ._models_py3 import ServiceDiagnosticSettingsResource from ._models_py3 import ServiceDiagnosticSettingsResourcePatch except (SyntaxError, ImportError): from ._models", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License.", "import LocalizableString from ._models_py3 import LogSettings from ._models_py3 import Metric from ._models_py3 import", "# type: ignore from ._models import LogSettings # type: ignore from ._models import", "._models_py3 import MetricCollection from ._models_py3 import MetricSettings from ._models_py3 import MetricValue from ._models_py3", "root for license information. 
# Code generated by Microsoft (R) AutoRest Code Generator.", "from ._models import Resource # type: ignore from ._models import RetentionPolicy # type:", "# type: ignore from ._models import LocalizableString # type: ignore from ._models import", "Resource # type: ignore from ._models import RetentionPolicy # type: ignore from ._models", "from ._models import MetricSettings # type: ignore from ._models import MetricValue # type:", "# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "the MIT License. See License.txt in the project root for license information. #", "# type: ignore from ._models import Metric # type: ignore from ._models import", "# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "# Changes may cause incorrect behavior and will be lost if the code" ]
[ "but now it should succeed in adding an alternate # hostname entry. name", "[]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing", "with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks),", "'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def test_created(self):", "'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment':", "name = 'server1' other_name = 'server1' ret = {'name': name, 'result': True, 'comment':", "self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True,", "name, 'result': True, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [],", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under gluster 3.7.x", "= ('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with", "ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "1 def test_started(self): ''' Test to check if volume has been started '''", "cases for salt.states.glusterfs ''' # 'peered' function tests: 1 def test_peered(self): ''' Test", "= ('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with", "salt.utils.cloud import salt.modules.glusterfs as 
mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK,", ":email:`<NAME> <<EMAIL>>` ''' # Import Python libs from __future__ import absolute_import # Import", "'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} mock", "True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1", "'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True,", "'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)):", "absolute_import # Import Salt Testing Libs from salttesting import skipIf, TestCase from salttesting.mock", "{name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an", "self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test to add", "self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be started'.format(name))", "= MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already", "added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment':", "{name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2", "probing of server1 by server2 used to result in # \"success_already_peer\" but now", "verify if node is 
peered. ''' name = 'server1' other_name = 'server1' ret", "30932: Peering an existing server by IP fails with gluster 3.7+ # #", "mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret)", "'created' function tests: 1 def test_created(self): ''' Test to check if volume already", "('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with", "mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) #", "'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test", "self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new': ['host1'],", "in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual(", "'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from integration import", "{'name': name, 'result': True, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [],", "MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks':", "ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test to add brick(s)", "TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath", "= MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = 
MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock,", "''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs from __future__ import absolute_import", "salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states import glusterfs from", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing server by IP", "'started' function tests: 1 def test_started(self): ''' Test to check if volume has", "Import Salt Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs", "('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__,", "mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status':", "mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'})", "{'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}", "'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "probed by address, 10.0.0.2. 
Under 3.4, server1 would be # known as 10.0.0.1", "'bricks successfully added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks),", "{'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not exist'.format(name))", "existing server by IP fails with gluster 3.7+ # # server2 was probed", "def test_add_volume_bricks(self): ''' Test to add brick(s) to an existing volume ''' name", "self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters comt = ('Invalid characters in peer", "known as 10.0.0.1 but starting with 3.7, its hostname of server1 would be", "if node is peered. ''' name = 'server1' other_name = 'server1' ret =", "= MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict,", "Python libs from __future__ import absolute_import # Import Salt Testing Libs from salttesting", "'/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} mock =", "= 'server1' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} #", "with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with", "patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in volume name.') ret.update({'comment': comt, 'result':", "has been started ''' name = 'salt' ret = {'name': name, 'result': False,", "probe already existing server2 under gluster 3.4.x comt = ('Host {0} already 
peered'.format(name))", "NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs ''' # 'peered' function tests:", "MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states", "mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt", "= 'server1' other_name = 'server1' ret = {'name': name, 'result': True, 'comment': '',", "Test to check if volume already exists ''' name = 'salt' bricks =", "is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}):", "comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0}", "= {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment': '', 'changes':", "exist', 'is not started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added',", "{name: []}, 'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "= MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}])", "glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test", "by IP fails with gluster 3.7+ # # server2 was probed by address,", "invalid characters comt = ('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt,", 
"'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip:", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment':", "peer server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "comt = ('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret)", "= MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with", "'/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}", "('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__,", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name':", "if volume already exists ''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2':", "class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs ''' # 'peered' function tests: 1", "patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}):", "hostname of server1 would be # known instead. 
Subsequent probing of server1 by", "= ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []},", "ensure_in_syspath('../../') # Import Salt Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults", "server1 by server2 used to result in # \"success_already_peer\" but now it should", "self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt =", "from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as", "comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old':", "comt, 'changes': { 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time)", "{}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret)", "False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in volume name.') ret.update({'comment':", "ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result':", "'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test to", "return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "'old': {name: []}, 
'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip:", "mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name:", "MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932:", "ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) #", "= ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name:", "name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result':", "patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt})", "import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON)", "server by IP fails with gluster 3.7+ # # server2 was probed by", "ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from", 
"function tests: 1 def test_created(self): ''' Test to check if volume already exists", "an existing server by IP fails with gluster 3.7+ # # server2 was", "mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing server by IP fails", "''' # Import Python libs from __future__ import absolute_import # Import Salt Testing", "'result': True, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [], [name]])", "comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__,", "= ('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'),", "import absolute_import # Import Salt Testing Libs from salttesting import skipIf, TestCase from", "gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name:", "'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function", "'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}})", "'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does", "used to result in # \"success_already_peer\" but now it should succeed in adding", "1 def test_created(self): ''' Test to check if volume already exists ''' name", "{} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs ''' # 'peered'", "{'name': name, 'result': False, 'comment': '', 'changes': {}} mock = 
MagicMock(side_effect=['does not exist',", "[], 'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started'", "peered. ''' name = 'server1' other_name = 'server1' ret = {'name': name, 'result':", "False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from integration import run_tests", "'#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def", "-*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs from __future__ import", "volume ''' name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name':", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under gluster 3.7.x mock_xml", "import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test", "return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__,", "test for invalid characters comt = ('Invalid characters in peer name.') ret.update({'name': '#badhostname',", "# 'peered' function tests: 1 def test_peered(self): ''' Test to verify if node", "Salt Testing Libs from salttesting import skipIf, TestCase from salttesting.mock import ( NO_MOCK,", "'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does", "('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 
'changes': {}}) mock_xml = MagicMock(", "characters comt = ('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result':", "= ('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret)", "test_add_volume_bricks(self): ''' Test to add brick(s) to an existing volume ''' name =", "return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__,", "True}): comt = ('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name),", "'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt})", "[name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes':", "existing server2 under gluster 3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt,", "patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name),", "('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old': []}})", "glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new': ['host1'], 'old':", "function tests: 1 def test_add_volume_bricks(self): ''' Test to add brick(s) to an existing", "self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes':", "'change': 
{'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def", "tests: 1 def test_started(self): ''' Test to check if volume has been started", "self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing server by IP fails with", "volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name,", "''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name':", "# probe already existing server2 under gluster 3.4.x comt = ('Host {0} already", "exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume", "added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment':", "import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import", "self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started',", "Libs from salttesting import skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock,", "peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function", "True, 'comment': '', 'changes': {}} # probe new peer server2 under gluster 3.4.x", "under gluster 3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}})", "{'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, 
NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for", "= 'server1' ip = '10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip,", "[], [], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt", "glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from integration import run_tests run_tests(GlusterfsTestCase, needs_daemon=False)", "__future__ import absolute_import # Import Salt Testing Libs from salttesting import skipIf, TestCase", "not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already started'.format(name))", "10.0.0.2. Under 3.4, server1 would be # known as 10.0.0.1 but starting with", "already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False})", "MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.7.x", "Import Salt Testing Libs from salttesting import skipIf, TestCase from salttesting.mock import (", "{'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1 def", "bricks), ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes':", "<<EMAIL>>` ''' # Import Python libs from __future__ import absolute_import # Import Salt", "{}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name:", 
"should succeed in adding an alternate # hostname entry. name = 'server1' ip", "does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already", "''' Test to add brick(s) to an existing volume ''' name = 'salt'", "3.7+ # # server2 was probed by address, 10.0.0.2. Under 3.4, server1 would", "MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer", "salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud", "# 'created' function tests: 1 def test_created(self): ''' Test to check if volume", "self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1 def test_started(self): ''' Test to", "Salt Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as", "coding: utf-8 -*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs from", "server2 used to result in # \"success_already_peer\" but now it should succeed in", "self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.4.x comt = ('Host", "mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks),", "= {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment': '',", "{ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added", "ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1", 
"{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment':", "comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment':", "'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function", "successfully added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret)", "[ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []},", "MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with", "'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test to add brick(s) to an", "{'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name,", "'comment': '', 'changes': {}} # probe new peer server2 under gluster 3.4.x comt", "{name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2", "gluster 3.7+ # # server2 was probed by address, 10.0.0.2. 
Under 3.4, server1", "# test for invalid characters comt = ('Invalid characters in peer name.') ret.update({'name':", "{'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment':", "to add brick(s) to an existing volume ''' name = 'salt' bricks =", "tests: 1 def test_created(self): ''' Test to check if volume already exists '''", "import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__", "1 def test_peered(self): ''' Test to verify if node is peered. ''' name", "\"success_already_peer\" but now it should succeed in adding an alternate # hostname entry.", "[name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock,", "'result': True, 'comment': '', 'changes': {}} # probe new peer server2 under gluster", "ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {", "ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test import", "= MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}])", "mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.4.x comt =", "'', 'changes': {}} # probe new peer server2 under gluster 3.4.x comt =", "MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with", 
"ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result':", "comt = ('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret)", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.7.x mock_xml", "# \"success_already_peer\" but now it should succeed in adding an alternate # hostname", "characters in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt =", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters comt = ('Invalid characters", "= {'name': name, 'result': True, 'comment': '', 'changes': {}} # probe new peer", "{}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]},", "= ('Invalid characters in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret)", "mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt,", "{'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment': '', 'changes':", "MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt =", "MagicMock( 
return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__,", "ret) # 'created' function tests: 1 def test_created(self): ''' Test to check if", "volume has been started ''' name = 'salt' ret = {'name': name, 'result':", "function tests: 1 def test_started(self): ''' Test to check if volume has been", "mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) #", "'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "ret) # probe already existing server2 under gluster 3.4.x comt = ('Host {0}", "entry. name = 'server1' ip = '10.0.0.1' comt = ('Host {0} already peered'.format(ip))", "patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}):", "{'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in volume name.')", "probe new peer server2 under gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name))", "bricks), ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully", "'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True,", "NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs", "False, 'comment': '', 'changes': {}} mock = 
MagicMock(side_effect=[[], [name], [name], [name]]) mock_t =", "'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name,", "in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host", "<reponame>stephane-martin/salt-debian-packaging<gh_stars>0 # -*- coding: utf-8 -*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import", "comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not", "{'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume',", "ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__", "would be # known as 10.0.0.1 but starting with 3.7, its hostname of", "ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result':", "ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be created'.format(name)) ret.update({'comment':", "glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) 
self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks", "import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer':", "successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml =", "[]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under", "comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock(", "brick(s) to an existing volume ''' name = 'salt' bricks = {'bricks': {'host1':", "patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret)", "{'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self):", "'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name:", "'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret)", "GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs ''' # 'peered' function tests: 1 def", "Peering an existing server by IP fails with gluster 3.7+ # # server2", "= ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml =", "= {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases", "{name: [ip]}]) 
with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already", "'is not started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks", "{ 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__',", "comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid", "other_name = 'server1' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}", "to verify if node is peered. ''' name = 'server1' other_name = 'server1'", "patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}):", "ret) # probe already existing server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname'])", "# 'started' function tests: 1 def test_started(self): ''' Test to check if volume", "patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states import", "{}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []},", "gluster 3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': 
mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip,", "{0} will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__,", "comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'],", "be # known as 10.0.0.1 but starting with 3.7, its hostname of server1", "('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret)", "self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name,", "ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual(", "{name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name:", "# -*- coding: utf-8 -*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python", "if volume has been started ''' name = 'salt' ret = {'name': name,", "ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added',", "= 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False,", "{}} mock = MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks, bricks, ''])", "{0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is", "ret = {'name': name, 'result': True, 'comment': '', 
'changes': {}} mock = MagicMock(side_effect=[[name],", "ret) # 'started' function tests: 1 def test_started(self): ''' Test to check if", "bricks), ret) # 'started' function tests: 1 def test_started(self): ''' Test to check", "# known as 10.0.0.1 but starting with 3.7, its hostname of server1 would", "ip, 'comment': comt, 'changes': { 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml =", "{'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip),", "self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under gluster 3.7.x mock_xml = MagicMock(", "exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already started'.format(name)) ret.update({'comment':", "skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import", "{0} will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test':", "salt.states.glusterfs ''' # 'peered' function tests: 1 def test_peered(self): ''' Test to verify", "MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks", "ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml = MagicMock(", "under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "'comment': comt, 'changes': { 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml = MagicMock(", "ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) 
self.assertDictEqual(", "{0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []}, 'old': {}}})", "= MagicMock(side_effect=[{name: []}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt =", "Testing Libs from salttesting import skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON,", "patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result':", "MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer", "already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt", "[]}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1 def test_started(self): ''' Test", "'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): '''", "salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class", "will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test':", "mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret)", "{'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret)", "mock = MagicMock(side_effect=[{name: []}, 
{name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt", "[]}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new':", "utf-8 -*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs from __future__", "'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is not started', bricks,", "['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes':", "in # \"success_already_peer\" but now it should succeed in adding an alternate #", "GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ =", "''' Test to check if volume already exists ''' name = 'salt' bricks", "'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1 def test_started(self): '''", "'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks", "'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks),", "3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new':", "return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: 
[]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__,", "comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def test_created(self): '''", "[]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under", "already existing server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "[ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid", "[name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__,", "peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__',", "self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def test_created(self): ''' Test to check", "ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already peered'.format(name))", "3.4, server1 would be # known as 10.0.0.1 but starting with 3.7, its", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster", "comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True,", "'server1' other_name = 'server1' ret = {'name': name, 'result': True, 'comment': '', 'changes':", "mock_xml}): mock = MagicMock(side_effect=[{name: 
[ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret)", "already exists ''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret", "= {'name': name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[], [name],", "ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks'", "'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in", "already existing server2 under gluster 3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment':", "'10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}})", "{}} mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__,", "= MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with", "{name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__,", "True}): comt = ('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name,", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': 
mock_t}): comt = ('Volume {0} does", "name, 'result': True, 'comment': '', 'changes': {}} # probe new peer server2 under", "'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t", "for salt.states.glusterfs ''' # 'peered' function tests: 1 def test_peered(self): ''' Test to", "# hostname entry. name = 'server1' ip = '10.0.0.1' comt = ('Host {0}", "check if volume has been started ''' name = 'salt' ret = {'name':", "'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started')", "# 'add_volume_bricks' function tests: 1 def test_add_volume_bricks(self): ''' Test to add brick(s) to", "['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual(", "ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml", "is peered. 
''' name = 'server1' other_name = 'server1' ret = {'name': name,", "'']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__,", "as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__", "comt, 'result': True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other)", "self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes':", "'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def test_created(self): ''' Test", "mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret)", "False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) #", "starting with 3.7, its hostname of server1 would be # known instead. 
Subsequent", "'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change':", "already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks),", "mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0}", "started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt =", "@skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs ''' # 'peered' function", "from __future__ import absolute_import # Import Salt Testing Libs from salttesting import skipIf,", "ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new':", "mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ =", "import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {}", "'server1' ip = '10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment':", "existing server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "Test cases for salt.states.glusterfs ''' # 'peered' function tests: 1 def test_peered(self): '''", "= MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = 
MagicMock(side_effect=[{name: []}, {name: []}])", "server2 under gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result':", "3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name:", "server1 would be # known as 10.0.0.1 but starting with 3.7, its hostname", "ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__':", "server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "as 10.0.0.1 but starting with 3.7, its hostname of server1 would be #", "in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'})", "server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing", "volume already exists ''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'}", "will be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}):", "already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt =", "in adding an alternate # hostname entry. 
name = 'server1' ip = '10.0.0.1'", "but starting with 3.7, its hostname of server1 would be # known instead.", "'/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock =", "with gluster 3.7+ # # server2 was probed by address, 10.0.0.2. Under 3.4,", "ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name,", "successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name: []}, 'new': {name: [ip]}}})", ":codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs from __future__ import absolute_import #", "succeed in adding an alternate # hostname entry. name = 'server1' ip =", "# server2 was probed by address, 10.0.0.2. Under 3.4, server1 would be #", "ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be started'.format(name)) ret.update({'comment':", "{'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.4.x comt", "patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result':", "= MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue", "instead. 
Subsequent probing of server1 by server2 used to result in # \"success_already_peer\"", "patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name),", "peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "from salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import", "existing volume ''' name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret =", "= MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt", "salttesting import skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from", "server2 was probed by address, 10.0.0.2. Under 3.4, server1 would be # known", "patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not", "with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "'server1' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} # probe", "'', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume", "= {'name': name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does not", "would be # known instead. 
Subsequent probing of server1 by server2 used to", "gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes':", "True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be", "Test to add brick(s) to an existing volume ''' name = 'salt' bricks", "was probed by address, 10.0.0.2. Under 3.4, server1 would be # known as", "probe already existing server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__',", "[ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip))", "by address, 10.0.0.2. Under 3.4, server1 would be # known as 10.0.0.1 but", "successfully added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}):", "its hostname of server1 would be # known instead. Subsequent probing of server1", "server2 under gluster 3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes':", "IP fails with gluster 3.7+ # # server2 was probed by address, 10.0.0.2.", "probe new peer server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__',", "= ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume", "of server1 would be # known instead. 
Subsequent probing of server1 by server2", "mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status':", "{}} # probe new peer server2 under gluster 3.4.x comt = ('Peer {0}", "( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt", "fails with gluster 3.7+ # # server2 was probed by address, 10.0.0.2. Under", "mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}])", "with 3.7, its hostname of server1 would be # known instead. Subsequent probing", "''' name = 'salt' ret = {'name': name, 'result': False, 'comment': '', 'changes':", "peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret)", "None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new':", "'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks, bricks,", "comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created'", "mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is", "# probe new peer server2 under gluster 3.4.x comt = ('Peer {0} added", "MagicMock(side_effect=[{name: []}, {name: [ip]}]) 
with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host", "-*- coding: utf-8 -*- ''' :codeauthor: :email:`<NAME> <<EMAIL>>` ''' # Import Python libs", "# probe already existing server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with", "test_created(self): ''' Test to check if volume already exists ''' name = 'salt'", "mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt,", "[]}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing", "# # server2 was probed by address, 10.0.0.2. Under 3.4, server1 would be", "= MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock,", "with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be started'.format(name)) ret.update({'comment': comt,", "{'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip),", "test_started(self): ''' Test to check if volume has been started ''' name =", "'result': True, 'change': {'new': 'started', 'old': 'stopped'}}) self.assertDictEqual(glusterfs.started(name), ret) # 'add_volume_bricks' function tests:", "mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) #", "# Import Salt Libs from salt.states 
import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import", "salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer}", "known instead. Subsequent probing of server1 by server2 used to result in #", "return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret)", "tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs as mod_glusterfs", "bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment': '',", "not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks),", "'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result':", "= 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result':", "'changes': {}} mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict =", "= MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', ''])", "{'name': name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[], [name], [name],", "= {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): 
''' Test cases for salt.states.glusterfs ''' #", "'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name:", "''' name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret = {'name': name,", "{'name': name, 'result': True, 'comment': '', 'changes': {}} # probe new peer server2", "exists ''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret =", "name = 'server1' ip = '10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name':", "name = 'salt' bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name,", "mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes':", "'', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = MagicMock(return_value=[])", "= MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already", "[ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters comt", "1 def test_add_volume_bricks(self): ''' Test to add brick(s) to an existing volume '''", "Issue 30932: Peering an existing server by IP fails with gluster 3.7+ #", "now it should succeed in adding an alternate # hostname entry. 
name =", "Test to check if volume has been started ''' name = 'salt' ret", "as mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase):", "[], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt =", "started ''' name = 'salt' ret = {'name': name, 'result': False, 'comment': '',", "MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with", "name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is", "3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []},", "= MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe", "Test to verify if node is peered. 
''' name = 'server1' other_name =", "mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '',", "add brick(s) to an existing volume ''' name = 'salt' bricks = {'bricks':", "mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name:", "('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []}, 'old':", "'']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual( glusterfs.add_volume_bricks(name,", "mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name:", "volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not exist'}) self.assertDictEqual(", "address, 10.0.0.2. 
Under 3.4, server1 would be # known as 10.0.0.1 but starting", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing server", "= MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks, bricks, '']) mock_t =", "Libs from salt.states import glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs", "MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__,", "def test_peered(self): ''' Test to verify if node is peered. ''' name =", "self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be", "'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{},", "{'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret)", "added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name: []}, 'new': {name:", "= MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []}, {ip: []}])", "# Import Python libs from __future__ import absolute_import # Import Salt Testing Libs", "{name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters", "'check_name', MagicMock(return_value=True)): comt = ('Invalid 
characters in volume name.') ret.update({'comment': comt, 'result': False})", "peer server2 under gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt,", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering an existing server by", "characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) #", "be # known instead. Subsequent probing of server1 by server2 used to result", "= MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}])", "return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__,", "to result in # \"success_already_peer\" but now it should succeed in adding an", "Subsequent probing of server1 by server2 used to result in # \"success_already_peer\" but", "ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already started'.format(name)) ret.update({'comment': comt,", "comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name',", "{'host1': '/srv/gluster/drive1'}} ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock", "glusterfs from tests.unit.modules.glusterfs_test import GlusterResults import salt.modules.glusterfs as mod_glusterfs import salt.utils.cloud import salt.modules.glusterfs", "bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be 
created'.format(name))", "import skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers", "with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in volume name.') ret.update({'comment': comt,", "{0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time)", "added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml", "libs from __future__ import absolute_import # Import Salt Testing Libs from salttesting import", "None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt", "new peer server2 under gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment':", "ret) comt = ('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name),", "not exist', 'is not started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully", "bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ ==", "comt = ('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks),", "self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if", "'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [],", "function 
tests: 1 def test_peered(self): ''' Test to verify if node is peered.", "{'test': True}): comt = ('Volume {0} will be started'.format(name)) ret.update({'comment': comt, 'result': None})", "'', 'result': False}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from integration", "= MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new", "MagicMock(return_value=True)): comt = ('Invalid characters in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name,", "'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is not", "mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}):", "gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{},", "mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment':", "{}} mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{},", "result in # \"success_already_peer\" but now it should succeed in adding an alternate", "adding an alternate # hostname entry. 
name = 'server1' ip = '10.0.0.1' comt", "to an existing volume ''' name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}}", "{'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name),", "already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "{'test': True}): comt = ('Volume {0} will be created'.format(name)) ret.update({'comment': comt, 'result': None})", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters comt =", "{0} is already started'.format(name)) ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test':", "= '10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes':", "False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result':", "mod_glusterfs glusterfs.__salt__ = {'glusterfs.peer': mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): '''", "''' Test cases for salt.states.glusterfs ''' # 'peered' function tests: 1 def test_peered(self):", "with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers':", "be created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}):", "name = 'salt' ret = {'name': name, 
'result': False, 'comment': '', 'changes': {}}", "# Import Salt Testing Libs from salttesting import skipIf, TestCase from salttesting.mock import", "MagicMock(side_effect=[{name: []}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already", "ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create':", "def test_started(self): ''' Test to check if volume has been started ''' name", "comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will", "= {'name': name, 'result': True, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [],", "{0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time)", "comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt =", "from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../')", "comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml", "with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in", "# probe new peer server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with", "Import 
Python libs from __future__ import absolute_import # Import Salt Testing Libs from", "bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume',", "'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst = MagicMock(return_value=[]) with", "of server1 by server2 used to result in # \"success_already_peer\" but now it", "('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with", "started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in", "= 'salt' ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock", "with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}):", "an alternate # hostname entry. name = 'server1' ip = '10.0.0.1' comt =", "bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes': {'new': ['host1'], 'old': ['host1']}})", "{'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip),", "test_peered(self): ''' Test to verify if node is peered. 
''' name = 'server1'", "bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', '']) with", "'changes': {'new': {name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') #", "to check if volume already exists ''' name = 'salt' bricks = {'host1':", "mock = MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks, bricks, '']) mock_t", "3.4.x comt = ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml =", "['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1 def test_started(self):", "ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': '',", "self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0} is already started'.format(name)) ret.update({'comment': comt, 'result': True})", "{'new': {name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}):", "True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests:", "check if volume already exists ''' name = 'salt' bricks = {'host1': '/srv/gluster/drive1',", "'', 'changes': {}} mock = MagicMock(side_effect=[[], [name], [name], [name]]) mock_t = MagicMock(return_value='started') mock_dict", "from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from salt.states import glusterfs", "{}}) mock_xml = MagicMock( 
return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{ip: []},", "'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst =", "not started', bricks, bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already", "'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret)", "'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name, bricks), ret) # 'started' function tests: 1", "node is peered. ''' name = 'server1' other_name = 'server1' ret = {'name':", "bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret)", "comt = ('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False})", "for invalid characters comt = ('Invalid characters in peer name.') ret.update({'name': '#badhostname', 'comment':", "= MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # test", "''' Test to verify if node is peered. 
''' name = 'server1' other_name", "def test_created(self): ''' Test to check if volume already exists ''' name =", "be started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment':", "ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0}", "glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs '''", "[]}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # Issue 30932: Peering", "ip = '10.0.0.1' comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt,", "True, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[name], [], [], [], [name]]) mock_lst", "peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__',", "name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0} already", "''' Test to check if volume has been started ''' name = 'salt'", "ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[],", "mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name),", "3.7, its hostname of server1 would be # known instead. 
Subsequent probing of", "True, 'changes': {'new': ['host1'], 'old': ['host1']}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already", "'']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume': mock_t}): comt = ('Volume {0}", "''' name = 'server1' other_name = 'server1' ret = {'name': name, 'result': True,", "# known instead. Subsequent probing of server1 by server2 used to result in", "by server2 used to result in # \"success_already_peer\" but now it should succeed", "mock = MagicMock(side_effect=[{}, {name: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe", "[]}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0}", "volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt = ('Host {0}", "True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__',", "been started ''' name = 'salt' ret = {'name': name, 'result': False, 'comment':", "Under 3.4, server1 would be # known as 10.0.0.1 but starting with 3.7,", "''' # 'peered' function tests: 1 def test_peered(self): ''' Test to verify if", "'peered' function tests: 1 def test_peered(self): ''' Test to verify if node is", "import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import", "ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment':", "[]}, 'new': {name: [ip]}}}) mock_xml = MagicMock( 
return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock", "self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'is not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment':", "ret) # Issue 30932: Peering an existing server by IP fails with gluster", "from salttesting import skipIf, TestCase from salttesting.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch)", "= MagicMock(side_effect=[{ip: []}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt =", "an existing volume ''' name = 'salt' bricks = {'bricks': {'host1': '/srv/gluster/drive1'}} ret", "with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will be created'.format(name)) ret.update({'comment': comt,", "ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} # probe new", "to check if volume has been started ''' name = 'salt' ret =", "[name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume", "ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud,", "'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume {0} will", "[]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip))", "'', 'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is not started', bricks, bricks,", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): 
self.assertDictEqual(glusterfs.peered(ip), ret) # test for invalid characters comt = ('Invalid", "MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}]) with", "= ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test':", "mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.7.x mock_xml =", "mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under gluster 3.7.x mock_xml =", "ret.update({'comment': comt, 'result': True}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt = ('Volume", "name.') ret.update({'name': '#badhostname', 'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests:", "MagicMock( return_value=GlusterResults.v37.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: []}]) with", "{0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}): comt", "bricks, bricks, '']) mock_t = MagicMock(side_effect=['bricks successfully added', 'Bricks already in volume', ''])", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip,", "patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: []}, {name: [ip]}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': 
mock}):", "{0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name: []}, 'new':", "mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{}, {name: []}])", "= ('Host {0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname'])", "mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name:", "patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters in volume", "'changes': {}} # probe new peer server2 under gluster 3.4.x comt = ('Peer", "'changes': { 'old': {name: []}, 'new': {name: [ip]}}}) mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_first_time) with", "started'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.started(name), ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started',", "patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.4.x", "10.0.0.1 but starting with 3.7, its hostname of server1 would be # known", "[]}, {ip: []}]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0}", "comt = ('Invalid characters in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks),", "mock}): self.assertDictEqual(glusterfs.peered(ip), ret) # 
test for invalid characters comt = ('Invalid characters in", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(name), ret) # probe new peer server2 under gluster", "alternate # hostname entry. name = 'server1' ip = '10.0.0.1' comt = ('Host", "self.assertDictEqual(glusterfs.peered(name), ret) # probe already existing server2 under gluster 3.7.x mock_xml = MagicMock(", "created'.format(name)) ret.update({'comment': comt, 'result': None}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': False}): with", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment':", "under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock =", "mock_t}): comt = ('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt", "it should succeed in adding an alternate # hostname entry. 
name = 'server1'", "new peer server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run':", "MagicMock( return_value=GlusterResults.v37.peer_probe.success_first_ip_from_second_second_time) with patch.dict('salt.modules.glusterfs.__salt__', {'cmd.run': mock_xml}): mock = MagicMock(side_effect=[{name: [ip]}, {name: [ip]}]) with", "with patch.dict(glusterfs.__salt__, {'glusterfs.list_peers': mock}): self.assertDictEqual(glusterfs.peered(ip), ret) comt = ('Peer {0} added successfully.'.format(ip)) ret.update({'name':", "ret) with patch.dict(glusterfs.__opts__, {'test': False}): with patch.object(salt.utils.cloud, 'check_name', MagicMock(return_value=True)): comt = ('Invalid characters", "self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) if __name__ == '__main__': from integration import run_tests run_tests(GlusterfsTestCase,", "{0} already peered'.format(name)) ret.update({'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_already_peer['hostname']) with patch.dict('salt.modules.glusterfs.__salt__',", "{0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old': []}}) self.assertDictEqual(glusterfs.created(name,", "hostname entry. 
name = 'server1' ip = '10.0.0.1' comt = ('Host {0} already", "False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=['does not exist', 'is not started',", "mock_lst}): comt = ('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with", "NO_MOCK_REASON, MagicMock, patch) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt Libs from", "('Volume {0} does not exist'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.started(name), ret) comt = ('Volume {0}", "mod_glusterfs.peer} glusterfs.__opts__ = {} @skipIf(NO_MOCK, NO_MOCK_REASON) class GlusterfsTestCase(TestCase): ''' Test cases for salt.states.glusterfs", "glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'Bricks already in volume', 'changes': {}}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks),", "under gluster 3.4.x comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True,", "tests: 1 def test_add_volume_bricks(self): ''' Test to add brick(s) to an existing volume", "'salt' ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} mock =", "'result': True, 'changes': {'new': {name: []}, 'old': {}}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_other) with", "already in volume', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.status': mock, 'glusterfs.add_volume_bricks': mock_t}): ret.update({'comment': 'does not", "not started'}) self.assertDictEqual( glusterfs.add_volume_bricks(name, bricks), ret) ret.update({'comment': 'bricks successfully added', 'result': True, 'changes':", "name, 'result': False, 'comment': '', 'changes': {}} mock = MagicMock(side_effect=[[], [name], [name], [name]])", "ret) comt = ('Host {0} already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}})", "# Issue 30932: Peering an existing server 
by IP fails with gluster 3.7+", "bricks = {'host1': '/srv/gluster/drive1', 'host2': '/srv/gluster/drive2'} ret = {'name': name, 'result': True, 'comment':", "with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old': 'stopped'}})", "server1 would be # known instead. Subsequent probing of server1 by server2 used", "comt = ('Peer {0} added successfully.'.format(name)) ret.update({'comment': comt, 'result': True, 'changes': {'new': {name:", "ret) with patch.dict(glusterfs.__opts__, {'test': False}): ret.update({'comment': 'started', 'result': True, 'change': {'new': 'started', 'old':", "= ('Host {0} already peered'.format(name)) ret.update({'comment': [], 'result': True, 'changes': {'new': ['salt'], 'old':", "ret) # test for invalid characters comt = ('Invalid characters in peer name.')", "MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}): comt = ('Volume {0} already exists.'.format(name))", "tests: 1 def test_peered(self): ''' Test to verify if node is peered. 
'''", "('Volume {0} already exists.'.format(name)) ret.update({'comment': comt}) self.assertDictEqual(glusterfs.created(name, bricks), ret) with patch.dict(glusterfs.__opts__, {'test': True}):", "('Invalid characters in volume name.') ret.update({'comment': comt, 'result': False}) self.assertDictEqual(glusterfs.created(name, bricks), ret) comt", "MagicMock(return_value='started') mock_dict = MagicMock(side_effect=[{}, '', '']) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.status': mock_dict, 'glusterfs.start_volume':", "already peered'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': {}}) mock_xml = MagicMock( return_value=GlusterResults.v34.peer_probe.success_first_ip_from_second_first_time) with", "[], [], [], [name]]) mock_lst = MagicMock(return_value=[]) with patch.dict(glusterfs.__salt__, {'glusterfs.list_volumes': mock, 'glusterfs.create': mock_lst}):", "ret) # probe new peer server2 under gluster 3.7.x mock_xml = MagicMock( return_value=GlusterResults.v37.peer_probe.success_other)", "False}) self.assertDictEqual(glusterfs.peered('#badhostname'), ret) # 'created' function tests: 1 def test_created(self): ''' Test to", "('Peer {0} added successfully.'.format(ip)) ret.update({'name': ip, 'comment': comt, 'changes': { 'old': {name: []}," ]
[ "CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [", "create plot fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if", "\"\"\" Create parts for title according to the type of plot Parameters ----------", "that were created \"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if not", "be saved in Returns ------- fnames: list list of file names with all", "Returns ------- capt: str box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts", "\\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({})", "str or list extensions which the files should be saved in save_file: bool,", "float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds)", "ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable doesn't have", "other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig,", "\"\"\" Save plot with name to self.out_dir Parameters ---------- out_name: str name of", "double metrics. 
Saves a figure and returns Matplotlib fig and ax objects for", "prevent key error when no CIs are in the netCDF if ci: bounds", "= [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to respect old naming", "return plotting objects if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files:", "in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type ==", "of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title", "<gh_stars>0 # -*- coding: utf-8 -*- from pathlib import Path import seaborn as", "out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for TC", "- bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI range:\"", "metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for", "the metric group if Var.g == 0: title = \"{} between all datasets\".format(globals._metric_name[metric])", "to old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double':", "bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI", "for. 
out_name: str name of output file out_types: str or list extensions which", "return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table for plot titles", "Parameters ---------- metric: str name of the metric out_types: str or list extensions", "ci[id].append(bounds) else: ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] =", "'True' in the initialization of the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use", "for {}-{} ({}) with {}-{} ({}) and {}-{} ({}) as the references'} try:", "def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple:", "Var: # when we only need reference dataset from variables (i.e. is the", "are in the netCDF if ci: bounds = ci[id] else: bounds = ci", "abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if", "= 0.02 # offset larger as common metrics have a shorter caption if", "image files of plots from the validation results in a QA4SMImage \"\"\" def", "with all the extensions \"\"\" fnames = [] # group Vars and CIs", "save_files: return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) ->", "ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could", "outname: pathlib.Path correct path of the file \"\"\" out_name = Path(out_name) # provide", "QA4SMMetricVariab;e Var in the image to make the map for. 
out_name: str name", "type of plot \"\"\" # we stick to old naming convention names =", "a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box plots from results", "and convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table for file", "if ci: bounds = ci[id] else: bounds = ci # create plot fig,", "message = \"type '{}' is not in the lookup table\".format(type) warn(message) @staticmethod #", "\" \"set to 'True' in the initialization of the Image.\") def get_dir(self, out_dir:str)", "metric name add_stats : bool, optional (default: from globals) Add stats of median,", "from warnings import warn class QA4SMPlotter(): \"\"\" Class to create image files of", "@staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table for plot titles Parameters ----------", "# -*- coding: utf-8 -*- from pathlib import Path import seaborn as sns", "with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe with variable values and caption", "Vars and CIs relative to the same dataset metric_tc, ci = {}, {}", "larger as common metrics have a shorter caption if globals.watermark_pos not in [None,", "in [None, False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png') ->", "not existing else: out_dir = self.img.filepath.parent # use default otherwise return out_dir def", "the box bottom. tc: bool, default is False True if TC. 
Then, caption", "in the lookup table\".format(type) warn(message) def create_title(self, Var, type:str) -> str: \"\"\" Create", "(default: from globals) Add stats of median, iqr and N to the box", "elif Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else:", "title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) -> str: \"\"\" Create", "**plotting_kwargs ) -> list: \"\"\" Plots values to a map, using the values", "be saved in save_file: bool, optional. Default is False wether to save the", "thereof) offset: float offset of boxplots Var: QA4SMMetricVariable, optional. Default is None Specified", "if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats", "boxplot for common and double metrics. Saves a figure and returns Matplotlib fig", "Saves a figure and returns Matplotlib fig and ax objects for further processing.", "output directory plotting_kwargs: arguments for mapplot function. \"\"\" Metric = self.img.metrics[metric] if Metric.g", "(ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if", "mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) -> tuple:", "type: str type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith", "ci_df, ci_Var in self._yield_values(metric=metric): # make sure they refer to the right variable", "to be plotted. 
If None, uses 'png' Returns ------- outname: pathlib.Path correct path", "else: title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title for", "for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they refer to the right", "Parameters ---------- type: str type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of", "plt.close('all') if save_files: return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs", "Returns ------- fnames: list list of file names with all the extensions \"\"\"", "out_types: str or list extensions which the files should be saved in save_file:", "= pd.concat(dfs) # values are all Nan or NaNf - not plotted if", "fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute())", "CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns = [", "in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure',", "the file in the output directory plotting_kwargs: arguments for mapplot function Returns -------", "should be saved in save_all: bool, optional. Default is True. all plotted images", "dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name =", "pandas dataframes for all variables of a metric to plot Parameters ---------- metric:", ": str output filename (with or without extension) out_type : str, optional contains", "in save_file: bool, optional. 
Default is False wether to save the file in", "ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create", "met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count:", "-> str: \"\"\" Create name of the file Parameters ---------- Var: MetricVar variable", "list extensions which the files should be saved in save_file: bool, optional. Default", "(np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are", "file out_types: str or list extensions which the files should be saved in", "group if Var.g == 0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name =", "(default: None) Path to output generated plot. If None, defaults to the current", "to self.out_dir Parameters ---------- out_name: str name of output file out_types: str or", "= \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize", "mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns =", "figure and returns Matplotlib fig and ax objects for further processing. 
Parameters ----------", "boxplot_tc( # todo: set limits to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png',", "bounds = ci # create plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds,", "todo: set limits to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs", "**plotting_kwargs ) -> list: \"\"\" Creates a boxplot for common and double metrics.", "the metric out_types: str or list extensions which the files should be saved", "caption starts with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe with variable values", "and returns Matplotlib fig and ax objects for further processing. Parameters ---------- metric", "and caption name Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars =", "box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id, meta", "_titles_lut(type:str) -> str: \"\"\" Lookup table for plot titles Parameters ---------- type: str", "saved as .png to self.out_dir Parameters ---------- out_name : str output filename (with", "Parameters ---------- ds: pd.Series data on which stats are found med: bool iqr:", "(axis) caption Parameters ---------- Var: MetricVar variable for a metric tc: bool, default", "stats of the box (axis) caption Parameters ---------- ds: pd.Series data on which", "and plot settings depend on the metric group if Var.g == 0: title", "metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id, values in metric_tc.items(): dfs, Var", "str: \"\"\" Lookup table for file names Parameters ---------- type: str type of", "as common metrics have a shorter caption if globals.watermark_pos not in [None, False]:", "fnames = [] # group Vars and CIs relative to the same dataset", "\"\"\" Standardized behaviour for filenames: if provided name has extension, it is kept;", "= {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the 
reference', 'boxplot_tc': 'Intercomparison", "N to the box bottom. tc: bool, default is False True if TC.", "\"\"\" parts = [] ref, mds, other = [meta for meta in Var.get_varmeta()]", "_filenames_lut(type:str) -> str: \"\"\" Lookup table for file names Parameters ---------- type: str", "- not plotted if np.isnan(values.to_numpy()).all(): return None # create plot fig, ax =", "type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title for plot, make watermark ax.set_title(title,", "self.out_dir.joinpath(out_name) # provide output file type if not out_path.suffix: if out_type[0] != '.':", "range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables in the same", "fnames, values = [], [] ci = [] # we take the last", "Parameters ---------- image : QA4SMImg The results object. out_dir : str, optional (default:", "other input values. Parameters ---------- var : QA4SMMetricVariab;e Var in the image to", "for title \"\"\" parts = [] ref, mds, other = [meta for meta", "for file names Parameters ---------- type: str type of plot \"\"\" # we", "# todo: set limits to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False,", "out_name : str output filename (with or without extension) out_type : str, optional", "to save the file in the output directory plotting_kwargs: arguments for _boxplot_definition function", "the file in the output directory plotting_kwargs: arguments for _boxplot_definition function Returns -------", "list: \"\"\" Plots values to a map, using the values as color. Plots", "# make sure they refer to the right variable if ci_Var.is_CI and \\", "str, optional contains file extensions to be plotted. 
If None, uses 'png' Returns", "in self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) #", "= [] # group Vars and CIs relative to the same dataset metric_tc,", "# title and plot settings depend on the metric group if Var.g ==", "or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all", "values. Parameters ---------- var : QA4SMMetricVariab;e Var in the image to make the", "variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds:", "of the plot Parameters ---------- Var: MetricVar variable for a metric type: str", "\"type '{}' is not in the lookup table\".format(type) warn(message) def create_title(self, Var, type:str)", "axis=1) # get the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range", "default is False True if TC. Then, caption starts with \"Other Data:\" Yield", "- bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI range:\"", "{\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot (or triplet thereof) offset: float", "\"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id, meta = mds_meta", "\"\"\" fnames = [] # group Vars and CIs relative to the same", "plot Parameters ---------- metric: str metric name add_stats : bool, optional (default: from", "a metric type: str type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta,", "case mds meta is needed \"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format(", "make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save", "else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to respect", "save or 
return plotting objects if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all')", "QA4SMMetricVariable, optional. Default is None Specified in case mds meta is needed \"\"\"", "values to a map, using the values as color. Plots a scatterplot for", "\"\"\" Get iterable with pandas dataframes for all variables of a metric to", "out_dir : str, optional (default: None) Path to output generated plot. If None,", "in enumerate(Vars): values = Var.values[Var.varname] # changes if it's a common-type Var if", "stats are found med: bool iqr: bool count: bool statistics Returns ------- stats:", "else: ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df],", "mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot", "= self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) return fnames_bplot,", "metric to plot Parameters ---------- metric: str metric name add_stats : bool, optional", "naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc':", "of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic': '{}", "= self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values = Var.values[Var.varname] # changes if", "\"\"\" Creates a boxplot for common and double metrics. 
Saves a figure and", "to save the file in the output directory plotting_kwargs: arguments for mapplot function", "out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan or NaNf - not", "fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric,", "plot Parameters ---------- Var: MetricVar variable for a metric type: str type of", "= self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts of the name", "name to self.out_dir Parameters ---------- out_name: str name of output file out_types: str", "= image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object has not been loaded.", "[meta for meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif", "wether to save the file in the output directory plotting_kwargs: arguments for _boxplot_definition", "make if not existing else: out_dir = self.img.filepath.parent # use default otherwise return", "0.02 # offset larger as common metrics have a shorter caption if globals.watermark_pos", "= pd.concat(values) # values are all Nan or NaNf - not plotted if", "xticks: list caption to each boxplot (or triplet thereof) offset: float offset of", "Parameters ---------- type: str type of plot \"\"\" # we stick to old", "type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. Below workaround to avoid same names if", "out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs)", "metric_tc[id] = [df], Var for id, values in metric_tc.items(): dfs, Var = values", "for a given metric in the loaded file. 
Parameters ---------- metric : str", "workaround to avoid same names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else:", "type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) -> str: \"\"\"", "# concat upper and lower CI bounds of Variable, if present bounds =", "[out_types] for ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file", ": list List of files that were created \"\"\" fnames = [] for", "else: out_dir = self.img.filepath.parent # use default otherwise return out_dir def _standard_filename(self, out_name:str,", "= '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield df, Var", "---------- metric: str name of the metric out_types: str or list extensions which", "type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title", "NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent", "save the file in the output directory plotting_kwargs: arguments for mapplot function Returns", "[globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) ->", "Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3:", "arguments for mapplot function Returns ------- fnames: list list of file names with", "= [] id, meta = mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format(", "values are all Nan or NaNf - not plotted if np.isnan(values.to_numpy()).all(): return None", "Create box plots from results in a qa4sm output file. 
Parameters ---------- image", "scatterplot for ISMN and a image plot for other input values. Parameters ----------", "[Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type ==", "table\".format(type) warn(message) @staticmethod # todo: cange file names and convention in qa4sm def", "= [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str)", "to the output directory plotting_kwargs: arguments for mapplot function. \"\"\" Metric = self.img.metrics[metric]", "if tc: capt = 'Other Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var,", "ds_parts = [] id, meta = mds_meta if tc: id, meta = other_meta", "image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg", "take the last iterated value for Var and use it for the file", "type='boxplot_basic') # save or return plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types)", "diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0] +", "is kept; otherwise, it is saved as .png to self.out_dir Parameters ---------- out_name", "Create name of the file Parameters ---------- Var: MetricVar variable for a metric", "mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def _yield_values(self, metric:str,", "from qa4sm_reader.plot_utils import * from warnings import warn class QA4SMPlotter(): \"\"\" Class to", "parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table for plot titles Parameters", "they refer to the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds)", "plotting objects if save_files: fns = 
self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return", "in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts =", "Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types,", "= Var.get_varmeta() # fetch parts of the name for the variable if not", "out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for common", "= [out_types] for ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting", "**plotting_kwargs ) -> tuple: \"\"\" Plot and save boxplot and mapplot for a", "[ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to respect old naming convention", "\\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({})", "\"type '{}' is not in the lookup table\".format(type) warn(message) @staticmethod # todo: cange", "continue # necessary if statement to prevent key error when no CIs are", "output filename (with or without extension) out_type : str, optional contains file extensions", "= mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt", "type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str:", "------- fnames: list list of file names with all the extensions \"\"\" fnames,", "map, using the values as color. 
Plots a scatterplot for ISMN and a", "= [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) #", "_boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\"", "stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap =", "and CIs relative to the same dataset metric_tc, ci = {}, {} for", "== Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable", "found med: bool iqr: bool count: bool statistics Returns ------- stats: str caption", "= self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') #", "a larger caption if Var.g == 0: offset = 0.02 # offset larger", "sure they refer to the right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds):", "MetricVar variable for a metric tc: bool, default is False True if TC.", "self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax def mapplot_metric( self, metric:str, out_types:str='png',", "------- stats: str caption with summary stats \"\"\" # interquartile range iqr =", "'Other Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\"", "make sure they refer to the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds", "= [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable doesn't have CIs", "\"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box plots from results in a", "_save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot with name to self.out_dir Parameters", "out_path = out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str:", 
"[bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id,", "fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs", "ci.append(bounds) values.append(df) # put all Variables in the same dataframe values = pd.concat(values)", "out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save or return", "range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0]", "convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table for file names", "Mapplot for all variables for a given metric in the loaded file. Parameters", "for mapplot function Returns ------- fnames: list list of file names with all", "for the file name for df, Var in self._yield_values(metric=metric): if not Var.is_CI: #", "create_filename(self, Var, type:str) -> str: \"\"\" Create name of the file Parameters ----------", "caption if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return fig, ax def", "fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var( self,", "pad=globals.title_pad) # add watermark if self.img.has_CIs: offset = 0.06 # offset smaller as", "utf-8 -*- from pathlib import Path import seaborn as sns import pandas as", "common and double metrics. Saves a figure and returns Matplotlib fig and ax", "plotting_kwargs: arguments for mapplot function Returns ------- fnames: list list of file names", "type of plot Parameters ---------- Var: MetricVar variable for a metric type: str", "fnames: list list of file names with all the extensions \"\"\" ref_meta, mds_meta,", "the files should be saved in save_all: bool, optional. Default is True. 
all", "Var = values df = pd.concat(dfs) # values are all Nan or NaNf", "warn(message) def create_title(self, Var, type:str) -> str: \"\"\" Create title of the plot", "metric type: str type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title", "np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent key error when no CIs", "in metric_tc.items(): dfs, Var = values df = pd.concat(dfs) # values are all", "iqr: bool count: bool statistics Returns ------- stats: str caption with summary stats", "we only need reference dataset from variables (i.e. is the same): for Var", "default is False True if TC. Then, caption starts with \"Other Data:\" Returns", "if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n", "coding: utf-8 -*- from pathlib import Path import seaborn as sns import pandas", "that is collected from the file for all datasets and combined into one", "and combined into one plot. out_name: str name of output file out_types: str", "# save or return plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all')", "on which stats are found med: bool iqr: bool count: bool statistics Returns", "plots from the validation results in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None):", "old naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0],", "== \"mapplot_tc\": # necessary to respect old naming convention for dss in Var.other_dss:", "if TC. 
Then, caption starts with \"Other Data:\" Returns ------- capt: str box", "Parameters ---------- df: pd.DataFrame dataframe to plot type: str one of _titles_lut ci:", "= image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized", "= Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name) # provide output file", ") -> list: \"\"\" Plots values to a map, using the values as", "extensions which the files should be saved in Returns ------- fnames: list list", "boxplot (or triplet thereof) offset: float offset of boxplots Var: QA4SMMetricVariable, optional. Default", "fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan or", "name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts of the", "[figwidth, globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if", "# generate plot figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth,", "# we take the last iterated value for Var and use it for", "specified, otherwise same directory as the one storing the netCDF file\"\"\" if out_dir:", "self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name = self.create_filename(Var,", "fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig, ax def boxplot_tc(", "{:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables in the same dataframe values", "title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc')", "globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file 
or", "Var: MetricVar variable for a metric type: str type of plot Returns -------", "self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title for plot, make watermark", "= '.' + out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True,", "\"\"\" # we stick to old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common':", "'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e: message =", "for meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type", "if not Var.is_CI: id, names = Var.metric_ds bounds = [] for ci_df, ci_Var", "save_all: bool, optional. Default is True. all plotted images are saved to the", "meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' +", "the references'} try: return titles[type] except KeyError as e: message = \"type '{}'", "Class to create image files of plots from the validation results in a", "-> str: \"\"\" Create the dataset part of the box (axis) caption Parameters", "interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if med:", "'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e: message = \"type", "------- fnames: list list of file names with all the extensions \"\"\" ref_meta,", "fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\"", "parts for title according to the type of plot Parameters ---------- Var: MetricVar", "Create title of the plot Parameters ---------- Var: MetricVar variable for a metric", "list of file names with all the 
extensions \"\"\" fnames = [] if", "# could be that variable doesn't have CIs bounds = pd.concat(bounds, axis=1) #", "\"\"\" Plot and save boxplot and mapplot for a certain metric Parameters ----------", "save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True,", "out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames: if provided name has extension,", "caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id, meta =", "in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names = Var.metric_ds bounds = []", "out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values to a map,", "the same dataframe values = pd.concat(values) # values are all Nan or NaNf", "ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id, meta = mds_meta if", "TC metrics. Saves a figure and returns Matplotlib fig and ax objects for", "out_types=out_types) plt.close('all') return fnames else: return fig, ax def boxplot_tc( # todo: set", "# offset smaller as CI variables have a larger caption if Var.g ==", "if not out_path.suffix: if out_type[0] != '.': out_type = '.' 
+ out_type out_path", "= self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types,", "save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values to a map, using the", "fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns", "out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for common and", "in the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames: list", "if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\":", "to respect old naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0],", "'load_data' needs to be \" \"set to 'True' in the initialization of the", "in Returns ------- fnames: list list of file names with all the extensions", "lookup table\".format(type) warn(message) @staticmethod # todo: cange file names and convention in qa4sm", "names and convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table for", "names with all the extensions \"\"\" fnames = [] # group Vars and", "Var in self._yield_values(metric=metric): if not Var.is_CI: # concat upper and lower CI bounds", "return fig, ax def boxplot_tc( # todo: set limits to show confidence intervals", "from results in a qa4sm output file. Parameters ---------- image : QA4SMImg The", "offset of boxplots Var: QA4SMMetricVariable, optional. 
Default is None Specified in case mds", "= ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if", "\"\"\" Create title of the plot Parameters ---------- Var: MetricVar variable for a", "False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\"", "mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to", "for all datasets and combined into one plot. out_name: str name of output", "self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define", "caption name Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric})", "ax def boxplot_tc( # todo: set limits to show confidence intervals self, metric:str,", "in the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames :", "ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var: #", "# if None, extent is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) #", "warnings import warn class QA4SMPlotter(): \"\"\" Class to create image files of plots", ": QA4SMImg The results object. 
out_dir : str, optional (default: None) Path to", "ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc: capt =", "intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a", "list List of files that were created \"\"\" fnames = [] for Var", "{}-{} ({}) with {}-{} ({}) and {}-{} ({}) as the references'} try: return", "title of the plot Parameters ---------- Var: MetricVar variable for a metric type:", "fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot", "str one of _titles_lut ci: dict Dict of dataframes with the lower and", "fnames: list list of file names with all the extensions \"\"\" fnames =", "specified if not out_dir.exists(): out_dir.mkdir() # make if not existing else: out_dir =", "directory plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames: list list of file", "if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig, ax", "out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and save boxplot and mapplot", "generated plot. If None, defaults to the current working directory. \"\"\" self.img =", "the lookup table\".format(type) warn(message) @staticmethod # todo: cange file names and convention in", "the output directory plotting_kwargs: arguments for mapplot function. 
\"\"\" Metric = self.img.metrics[metric] if", "all Nan or NaNf - not plotted else: continue if save_files: fnames.extend(fns) plt.close('all')", "Path: \"\"\" Standardized behaviour for filenames: if provided name has extension, it is", "def create_title(self, Var, type:str) -> str: \"\"\" Create title of the plot Parameters", "smaller as CI variables have a larger caption if Var.g == 0: offset", "variable for a metric type: str type of plot \"\"\" name = self._filenames_lut(type=type)", "without extension) out_type : str, optional contains file extensions to be plotted. If", "self.img.has_CIs: offset = 0.06 # offset smaller as CI variables have a larger", "NaNf - not plotted if np.isnan(values.to_numpy()).all(): return None # create plot fig, ax", "**plotting_kwargs ) -> list: \"\"\" Creates a boxplot for TC metrics. Saves a", "self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use", "collected from the file for all datasets and combined into one plot. out_name:", "titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc':", "= boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var: # when", "not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file or just", "\" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if", "Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values to a", "and N to the box bottom. 
tc: bool, default is False True if", "self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all,", "present bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they", "and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound]", "in a qa4sm output file. Parameters ---------- image : QA4SMImg The results object.", "(axis) caption Parameters ---------- ds: pd.Series data on which stats are found med:", "== 0: offset = 0.02 # offset larger as common metrics have a", "extensions which the files should be saved in save_all: bool, optional. Default is", "they refer to the right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns", "\"\"\" Create name of the file Parameters ---------- Var: MetricVar variable for a", "!= '.': out_type = '.' + out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod", "return fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot with", "starts with \"Other Data:\" Returns ------- capt: str box caption \"\"\" ref_meta, mds_meta,", "generate plot figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height]", "show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\"", "bool, default is False True if TC. 
Then, caption starts with \"Other Data:\"", "def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for title according to the", "dfs, Var = values df = pd.concat(dfs) # values are all Nan or", "Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types,", "Create parts for title according to the type of plot Parameters ---------- Var:", "self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset = 0.06 #", "metrics have a shorter caption if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset)", "which the files should be saved in save_file: bool, optional. Default is False", "Var.g == 0: box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) #", "str: \"\"\" Lookup table for plot titles Parameters ---------- type: str type of", "uses 'png' Returns ------- outname: pathlib.Path correct path of the file \"\"\" out_name", "and use it for the file name for df, Var in self._yield_values(metric=metric): if", "names with all the extensions \"\"\" fnames = [] if isinstance(out_types, str): out_types", "use default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized", "= [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N:", "not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values", "df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df)", "{}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, 
out_name:str=None, out_types:str='png',", "== 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\"", "common-type Var if Var.g == 0: box_cap_ds = 'All datasets' else: box_cap_ds =", "boxplots Var: QA4SMMetricVariable, optional. Default is None Specified in case mds meta is", "as .png to self.out_dir Parameters ---------- out_name : str output filename (with or", "not in the lookup table\".format(type) warn(message) def create_title(self, Var, type:str) -> str: \"\"\"", "Save plot with name to self.out_dir Parameters ---------- out_name: str name of output", "saved in save_all: bool, optional. Default is True. all plotted images are saved", "'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e: message", "'\\n and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' + capt return capt", "out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values to a map, using", "if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def", "out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames: if provided name has", "else: return fig, ax def boxplot_tc( # todo: set limits to show confidence", "type='boxplot_tc') else: save_name = out_name # save or return plotting objects if save_files:", "type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var,", "\\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic': '{} for", "the validation results in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create", "are all Nan or NaNf - not 
plotted if np.isnan(values.to_numpy()).all(): return None #", "file names Parameters ---------- type: str type of plot \"\"\" # we stick", "not out_dir.exists(): out_dir.mkdir() # make if not existing else: out_dir = self.img.filepath.parent #", "= Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric,", "iterated value for Var and use it for the file name for df,", "to a map, using the values as color. Plots a scatterplot for ISMN", "the file Parameters ---------- Var: MetricVar variable for a metric type: str type", "[], [] ci = [] # we take the last iterated value for", "boxplot and mapplot for a certain metric Parameters ---------- metric: str name of", "= [meta for meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']])", "pathlib import Path import seaborn as sns import pandas as pd from qa4sm_reader.img", "values = pd.concat(values) # values are all Nan or NaNf - not plotted", "ISMN and a image plot for other input values. Parameters ---------- var :", "Path(out_dir) # use directory if specified if not out_dir.exists(): out_dir.mkdir() # make if", "color. Plots a scatterplot for ISMN and a image plot for other input", "Plot and save boxplot and mapplot for a certain metric Parameters ---------- metric:", "out_dir:str) -> Path: \"\"\"Use output path if specified, otherwise same directory as the", "the image to make the map for. 
out_name: str name of output file", "\"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title", "'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0])", "\"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else: ci[id]", "of a metric to plot Parameters ---------- metric: str metric name add_stats :", "variables for that metric. out_name: str name of output file out_types: str or", "Name of a metric. File is searched for variables for that metric. out_name:", "plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return", "= self.create_filename(Var, type='mapplot_tc') # use title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if", "None, uses 'png' Returns ------- outname: pathlib.Path correct path of the file \"\"\"", "save_name = out_name # save or return plotting objects if save_files: fns =", "Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan", "tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and", "self._yield_values(metric=metric): if not Var.is_CI: # concat upper and lower CI bounds of Variable,", "plotted. 
If None, uses 'png' Returns ------- outname: pathlib.Path correct path of the", "# interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if", "parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width * (len(df.columns)", "if present bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure", "no CIs are in the netCDF if ci: bounds = ci[id] else: bounds", "metric that is collected from the file for all datasets and combined into", "try: return titles[type] except KeyError as e: message = \"type '{}' is not", "0.06 # offset smaller as CI variables have a larger caption if Var.g", "type: str type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta =", "NaNf - not plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return", "2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot =", "file names with all the extensions \"\"\" fnames, values = [], [] ci", "not in the lookup table\".format(type) warn(message) @staticmethod # todo: cange file names and", "not out_path.suffix: if out_type[0] != '.': out_type = '.' 
+ out_type out_path =", "if not existing else: out_dir = self.img.filepath.parent # use default otherwise return out_dir", "id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df)", "= \"type '{}' is not in the lookup table\".format(type) warn(message) def create_title(self, Var,", "names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try:", "= self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap)", "try: self.img.vars except: warn(\"The initialized QA4SMImg object has not been loaded. 'load_data' needs", "values = [], [] ci = [] # we take the last iterated", "ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to respect old naming convention for", "def boxplot_tc( # todo: set limits to show confidence intervals self, metric:str, out_name:str=None,", "that metric. 
out_name: str name of output file out_types: str or list extensions", "certain metric Parameters ---------- metric: str name of the metric out_types: str or", "self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts of the name for", "make sure they refer to the right variable if ci_Var.is_CI and (ci_Var.metric_ds ==", "metric part with stats of the box (axis) caption Parameters ---------- ds: pd.Series", "[df], Var for id, values in metric_tc.items(): dfs, Var = values df =", "{'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type]", "---------- Var: MetricVar variable for a metric type: str type of plot Returns", "same directory as the one storing the netCDF file\"\"\" if out_dir: out_dir =", "import seaborn as sns import pandas as pd from qa4sm_reader.img import QA4SMImg import", "of \\n{} \\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor", "the last iterated value for Var and use it for the file name", "titles Parameters ---------- type: str type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison", "in the netCDF if ci: bounds = ci[id] else: bounds = ci #", "df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all", "for df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names = Var.metric_ds", "file. Parameters ---------- metric : str Name of a metric. File is searched", "ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']])", "been loaded. 
'load_data' needs to be \" \"set to 'True' in the initialization", "type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts", "\"mapplot_tc\": # necessary to respect old naming convention for dss in Var.other_dss: parts.extend([dss[0],", "Var.is_CI: id, names = Var.metric_ds bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric):", "or list extensions which the files should be saved in Returns ------- fnames:", "convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'}", "diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0] +", "Parameters ---------- var : QA4SMMetricVariab;e Var in the image to make the map", "'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic':", "(or triplet thereof) offset: float offset of boxplots Var: QA4SMMetricVariable, optional. Default is", "ci = [] # we take the last iterated value for Var and", "Returns ------- parts: list list of parts for title \"\"\" parts = []", "directory out_path = self.out_dir.joinpath(out_name) # provide output file type if not out_path.suffix: if", "[None, False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list:", "list extensions which the files should be saved in Returns ------- fnames: list", "{:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id", "0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g", ": str Name of a metric. File is searched for variables for that", "only need reference dataset from variables (i.e. 
is the same): for Var in", "str metric name add_stats : bool, optional (default: from globals) Add stats of", "\"\"\" fnames, values = [], [] ci = [] # we take the", "self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values = Var.values[Var.varname] # changes if it's", "1) figsize = [figwidth, globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize,", "[] id, meta = mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id,", "Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values = Var.values[Var.varname] # changes", "type: str type of plot \"\"\" # we stick to old naming convention", "meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n'", "ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts of the name for the", "parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot figwidth =", "metric in the loaded file. 
Parameters ---------- metric : str Name of a", "self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var,", "e: message = \"type '{}' is not in the lookup table\".format(type) warn(message) @staticmethod", "box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield df, Var def _boxplot_definition(", "MetricVar variable for a metric type: str type of plot \"\"\" parts =", "file names and convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table", "plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts", "plot titles Parameters ---------- type: str type of plot \"\"\" titles = {'boxplot_basic':", "ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically adjusted (as opposed to img.extent)", "or NaNf - not plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames:", "out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def", "if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric( self, metric:str, out_types:str='png',", "Parameters ---------- metric : str Name of a metric. File is searched for", "mapplot for a certain metric Parameters ---------- metric: str name of the metric", "type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define parameters of plot", "reference dataset from variables (i.e. is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var", "input values. 
Parameters ---------- var : QA4SMMetricVariab;e Var in the image to make", "the extensions \"\"\" fnames, values = [], [] ci = [] # we", "figsize=figsize, dpi=globals.dpi ) if not Var: # when we only need reference dataset", "# make if not existing else: out_dir = self.img.filepath.parent # use default otherwise", "lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each", "0: offset = 0.02 # offset larger as common metrics have a shorter", "+ \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables", "file or just return the image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return", "function. \"\"\" Metric = self.img.metrics[metric] if Metric.g == 0 or Metric.g == 2:", "return fnames else: return fig, ax def boxplot_tc( # todo: set limits to", "names Parameters ---------- type: str type of plot \"\"\" # we stick to", "save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save or return plotting", "name Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for", "Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names = Var.metric_ds bounds =", "self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and save boxplot", "be plotted. 
If None, uses 'png' Returns ------- outname: pathlib.Path correct path of", "seaborn as sns import pandas as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals", "parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table for", "== 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types, save_files=save_all,", "qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from warnings", "plot figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig,", "provided name has extension, it is kept; otherwise, it is saved as .png", "is False True if TC. Then, caption starts with \"Other Data:\" Yield -----", "todo: cange file names and convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\"", "if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file", "other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc: capt", "self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object has not been", "needs to be \" \"set to 'True' in the initialization of the Image.\")", "name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with", "if out_dir: out_dir = Path(out_dir) # use directory if specified if not out_dir.exists():", "self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name =", "one of _titles_lut ci: dict Dict of dataframes with the lower and upper", "Metric.g == 0 or 
Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs)", ": str metric that is collected from the file for all datasets and", "if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id, values", "fnames else: return fig, ax def boxplot_tc( # todo: set limits to show", "plotting_kwargs: arguments for mapplot function Returns ------- fnames : list List of files", "= box_cap_ds df = values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame,", "-> list: \"\"\" Creates a boxplot for TC metrics. Saves a figure and", "a map, using the values as color. Plots a scatterplot for ISMN and", "have CIs bounds = pd.concat(bounds, axis=1) # get the mean CI range diff", "* from warnings import warn class QA4SMPlotter(): \"\"\" Class to create image files", "= globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig, ax =", "if bounds: # could be that variable doesn't have CIs bounds = pd.concat(bounds,", "mds, other = [meta for meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0])", "pd.concat(dfs) # values are all Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all():", "else: bounds = ci # create plot fig, ax = self._boxplot_definition( metric=metric, df=df,", "= self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var( self, Var,", "diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if", "and double metrics. 
Saves a figure and returns Matplotlib fig and ax objects", "get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if specified, otherwise same directory as", "QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box plots from results in", "setting in global for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap =", "in the same dataframe values = pd.concat(values) # values are all Nan or", "file\"\"\" if out_dir: out_dir = Path(out_dir) # use directory if specified if not", "mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": #", "provide output directory out_path = self.out_dir.joinpath(out_name) # provide output file type if not", "caption Parameters ---------- Var: MetricVar variable for a metric tc: bool, default is", "self.create_filename(Var, type='mapplot_tc') # use title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos", "QA4SMImg The results object. 
out_dir : str, optional (default: None) Path to output", "extensions \"\"\" fnames = [] # group Vars and CIs relative to the", "not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save or", "sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) # title and plot settings depend", "[\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0],", "watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04)", "-*- coding: utf-8 -*- from pathlib import Path import seaborn as sns import", "plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def", "------- fnames : list List of files that were created \"\"\" fnames =", "List of files that were created \"\"\" fnames = [] for Var in", "---------- ds: pd.Series data on which stats are found med: bool iqr: bool", "Add stats of median, iqr and N to the box bottom. tc: bool,", "'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the reference',", "\"\"\" Creates a boxplot for TC metrics. 
Saves a figure and returns Matplotlib", "ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str)", "# offset larger as common metrics have a shorter caption if globals.watermark_pos not", "are all Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue # necessary", "use it for the file name for df, Var in self._yield_values(metric=metric): if not", "bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI range:\" \"", "parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return", "starts with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe with variable values and", "{}-{} ({}) and {}-{} ({}) as the references'} try: return titles[type] except KeyError", "out_type[0] != '.': out_type = '.' + out_type out_path = out_path.with_suffix(out_type) return out_path", "is True. all plotted images are saved to the output directory plotting_kwargs: arguments", "return the image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return", "Lookup table for file names Parameters ---------- type: str type of plot \"\"\"", "a scatterplot for ISMN and a image plot for other input values. 
Parameters", "caption with summary stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr =", "is collected from the file for all datasets and combined into one plot.", "list caption to each boxplot (or triplet thereof) offset: float offset of boxplots", "fig, ax def boxplot_tc( # todo: set limits to show confidence intervals self,", "Then, caption starts with \"Other Data:\" Returns ------- capt: str box caption \"\"\"", "output file. Parameters ---------- image : QA4SMImg The results object. out_dir : str,", "of file names with all the extensions \"\"\" fnames, values = [], []", "last iterated value for Var and use it for the file name for", "warn class QA4SMPlotter(): \"\"\" Class to create image files of plots from the", "and ax objects for further processing. Parameters ---------- metric : str metric that", "df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') #", "globals.watermark_pos, for_map=True, offset=0.04) # save file or just return the image if save_files:", "the type of plot Parameters ---------- Var: MetricVar variable for a metric type:", "output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames: list list of", "dataframe to plot type: str one of _titles_lut ci: dict Dict of dataframes", "met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def", "@staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for title according to", "to img.extent) **plotting_kwargs) # title and plot settings depend on the metric group", "in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box plots from", "df = values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None,", "type='mapplot_common') elif Var.g == 2: title 
= self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double')", "are all Nan or NaNf - not plotted else: continue if save_files: fnames.extend(fns)", "of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas", "= [figwidth, globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi )", "\\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' + capt return capt @staticmethod def", "fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name:", "to the right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound]", "pd.Series data on which stats are found med: bool iqr: bool count: bool", "bool statistics Returns ------- stats: str caption with summary stats \"\"\" # interquartile", "= self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax def mapplot_metric( self, metric:str,", "str type of plot \"\"\" # we stick to old naming convention names", "out_type : str, optional contains file extensions to be plotted. If None, uses", "] ci.append(bounds) values.append(df) # put all Variables in the same dataframe values =", "with {}-{} ({}) and {}-{} ({}) as the references'} try: return titles[type] except", "in qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table for file names Parameters", "warn(\"The initialized QA4SMImg object has not been loaded. 
'load_data' needs to be \"", "meta = mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version']))", "as the references'} try: return titles[type] except KeyError as e: message = \"type", "bounds.append(ci_df) if bounds: # could be that variable doesn't have CIs bounds =", "dataframes for all variables of a metric to plot Parameters ---------- metric: str", "depend on the metric group if Var.g == 0: title = \"{} between", "name for the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric]", "extension, it is kept; otherwise, it is saved as .png to self.out_dir Parameters", "Var.get_varmeta() ds_parts = [] id, meta = mds_meta if tc: id, meta =", "offset: float offset of boxplots Var: QA4SMMetricVariable, optional. Default is None Specified in", "capt = 'Other Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') ->", "Create the metric part with stats of the box (axis) caption Parameters ----------", "@staticmethod # todo: cange file names and convention in qa4sm def _filenames_lut(type:str) ->", "return title def create_filename(self, Var, type:str) -> str: \"\"\" Create name of the", "metric : str metric that is collected from the file for all datasets", "plt.close('all') if fnames: return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs )", "confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot (or triplet", "set limits to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs )", "else: metric_tc[id] = [df], Var for id, values in metric_tc.items(): dfs, Var =", "(ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that", "with all the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = 
Var.metric", "parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def", "all plotted images are saved to the output directory plotting_kwargs: arguments for mapplot", "offset=0.04) # save file or just return the image if save_files: fnames =", "optional (default: None) Path to output generated plot. If None, defaults to the", "(i.e. is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title", "bounds = pd.concat(bounds, axis=1) # get the mean CI range diff = bounds[\"upper\"]", "False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file or just return the image", "'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({}) as the reference', 'mapplot_tc': '{}", "to each boxplot (or triplet thereof) offset: float offset of boxplots Var: QA4SMMetricVariable,", "ci_Var in self._yield_values(metric=metric): # make sure they refer to the right variable if", "are saved to the output directory plotting_kwargs: arguments for mapplot function. \"\"\" Metric", "extensions \"\"\" fnames, values = [], [] ci = [] # we take", "None) Path to output generated plot. If None, defaults to the current working", "arguments for _boxplot_definition function Returns ------- fnames: list list of file names with", "str caption with summary stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr", "list list of file names with all the extensions \"\"\" ref_meta, mds_meta, other_meta", ": str, optional contains file extensions to be plotted. 
If None, uses 'png'", "put all Variables in the same dataframe values = pd.concat(values) # values are", "group Vars and CIs relative to the same dataset metric_tc, ci = {},", "out_name # save or return plotting objects if save_files: fns = self._save_plot(save_name, out_types=out_types)", ".png to self.out_dir Parameters ---------- out_name : str output filename (with or without", "[ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put", "fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) return", "range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median:", "the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas", "= bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean", "({}) \\nas the reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({}) as", "mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt =", "parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary", "be saved in save_all: bool, optional. Default is True. 
all plotted images are", "iterable with pandas dataframes for all variables of a metric to plot Parameters", "{} for df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names =", "for Var and use it for the file name for df, Var in", "as the reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({}) and {}-{}", "save the file in the output directory plotting_kwargs: arguments for _boxplot_definition function Returns", "\"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI):", "str, optional (default: None) Path to output generated plot. If None, defaults to", "box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df =", "**plotting_kwargs) # title and plot settings depend on the metric group if Var.g", "import pandas as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from", "as color. Plots a scatterplot for ISMN and a image plot for other", "ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot", "Returns ------- fnames : list List of files that were created \"\"\" fnames", "out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight')", "the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns", "{:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\"", ": bool, optional (default: from globals) Add stats of median, iqr and N", "all the extensions \"\"\" fnames = [] if isinstance(out_types, str): out_types = [out_types]", "if not Var.is_CI: # concat upper and lower CI bounds of Variable, if", "out_type out_path = out_path.with_suffix(out_type) return 
out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) ->", "-> list: \"\"\" Create parts for title according to the type of plot", "{}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith", "all Variables in the same dataframe values = pd.concat(values) # values are all", "extensions \"\"\" fnames = [] if isinstance(out_types, str): out_types = [out_types] for ext", "# setting in global for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap", "variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0],", "tc:bool=False) -> tuple: \"\"\" Get iterable with pandas dataframes for all variables of", "out_types=out_types) return fnames else: return fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False,", "# create plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs", "file in the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames:", "list: \"\"\" Creates a boxplot for common and double metrics. 
Saves a figure", "**plotting_kwargs ) -> list: \"\"\" Mapplot for all variables for a given metric", "ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var", "title according to the type of plot Parameters ---------- Var: MetricVar variable for", "ci: dict Dict of dataframes with the lower and upper confidence intervals shape:", "---------- metric: str metric name add_stats : bool, optional (default: from globals) Add", "= self.img.metrics[metric] if Metric.g == 0 or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric,", "= [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width", "not plotted if np.isnan(values.to_numpy()).all(): return None # create plot fig, ax = self._boxplot_definition(", "Var, type:str) -> str: \"\"\" Create title of the plot Parameters ---------- Var:", "have a larger caption if Var.g == 0: offset = 0.02 # offset", "------- capt: str box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts =", "list: \"\"\" Mapplot for all variables for a given metric in the loaded", "defaults to the current working directory. 
\"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir)", "str: \"\"\" Create the dataset part of the box (axis) caption Parameters ----------", "str name of the metric out_types: str or list extensions which the files", "the name for the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts =", "changes if it's a common-type Var if Var.g == 0: box_cap_ds = 'All", "all variables of a metric to plot Parameters ---------- metric: str metric name", "pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import *", "summary stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str", "type: str type of plot Returns ------- parts: list list of parts for", "-> str: \"\"\" Create the metric part with stats of the box (axis)", "({}) and {}-{} ({}) as the references'} try: return titles[type] except KeyError as", "ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. 
Below workaround to avoid same names", "with {}-{} ({}) as the reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{}", "caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap", "for id, values in metric_tc.items(): dfs, Var = values df = pd.concat(dfs) #", "output file out_types: str or list extensions which the files should be saved", "== 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot", "of the name for the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts", "stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create", "the reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({}) as the reference',", "return fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list:", "to the current working directory. \"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref", "capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for", "and a image plot for other input values. 
Parameters ---------- var : QA4SMMetricVariab;e", "def _filenames_lut(type:str) -> str: \"\"\" Lookup table for file names Parameters ---------- type:", "to the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\", "pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save", "otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for", "as sns import pandas as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as", "metric: str name of the metric out_types: str or list extensions which the", "variables (i.e. is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break", "name = name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get", "= [] ref, mds, other = [meta for meta in Var.get_varmeta()] if type", "be that variable doesn't have CIs bounds = pd.concat(bounds, axis=1) # get the", "save_files=save_all, **plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot", "extent is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) # title and plot", "return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset part", "a certain metric Parameters ---------- metric: str name of the metric out_types: str", "@staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric part", "type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({})", "str: \"\"\" Create the metric part with stats of the box (axis) caption", "np.isnan(values.to_numpy()).all(): return None # create plot fig, ax = self._boxplot_definition( metric=metric, df=values, 
type='boxplot_basic',", "\"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']]", "boxplot for TC metrics. Saves a figure and returns Matplotlib fig and ax", "a common-type Var if Var.g == 0: box_cap_ds = 'All datasets' else: box_cap_ds", "tc:bool=False) -> str: \"\"\" Create the dataset part of the box (axis) caption", "ci_range = float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range)", "'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError", "all the extensions \"\"\" fnames = [] # group Vars and CIs relative", "names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name #", ": str, optional (default: None) Path to output generated plot. If None, defaults", "(as opposed to img.extent) **plotting_kwargs) # title and plot settings depend on the", "searched for variables for that metric. out_name: str name of output file out_types:", "mapplot function Returns ------- fnames: list list of file names with all the", "qa4sm output file. Parameters ---------- image : QA4SMImg The results object. 
out_dir :", "= self.img.filepath.parent # use default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') ->", "str metric that is collected from the file for all datasets and combined", "plot settings depend on the metric group if Var.g == 0: title =", "\"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values = Var.values[Var.varname] #", "behaviour for filenames: if provided name has extension, it is kept; otherwise, it", "or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement to", "if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) ->", "+ out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True)", "(len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci,", "box_cap_ds df = values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str,", "[] ref, mds, other = [meta for meta in Var.get_varmeta()] if type ==", "for mapplot function Returns ------- fnames : list List of files that were", "mds_meta, other_meta = Var.get_varmeta() # fetch parts of the name for the variable", "of boxplots Var: QA4SMMetricVariable, optional. 
Default is None Specified in case mds meta", "all the extensions \"\"\" fnames, values = [], [] ci = [] #", "for df, Var in self._yield_values(metric=metric): if not Var.is_CI: # concat upper and lower", "Var.is_CI: # concat upper and lower CI bounds of Variable, if present bounds", "out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for TC metrics.", "\"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables in", "of Variable, if present bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): #", "ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id]", "Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if specified, otherwise same", "to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list:", "optional. Default is True. all plotted images are saved to the output directory", "iqr and N to the box bottom. tc: bool, default is False True", "fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\"", "self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. 
Below workaround to", "'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e: message = \"type '{}' is", "of dataframes with the lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks:", "pathlib.Path correct path of the file \"\"\" out_name = Path(out_name) # provide output", "for a metric type: str type of plot Returns ------- parts: list list", "directory as the one storing the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir)", "-> tuple: \"\"\" Define parameters of plot Parameters ---------- df: pd.DataFrame dataframe to", "out_name:str, out_types:str='png') -> list: \"\"\" Save plot with name to self.out_dir Parameters ----------", "dataset part of the box (axis) caption Parameters ---------- Var: MetricVar variable for", "({}) with {}-{} ({}) as the reference', 'mapplot_tc': '{} for {}-{} ({}) with", "to self.out_dir Parameters ---------- out_name : str output filename (with or without extension)", "'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'],", "objects if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames", "# create plot fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs )", "CIs bounds = pd.concat(bounds, axis=1) # get the mean CI range diff =", "the box (axis) caption Parameters ---------- ds: pd.Series data on which stats are", "_titles_lut ci: dict Dict of dataframes with the lower and upper confidence intervals", "names with all the extensions \"\"\" fnames, values = [], [] ci =", "mapplot function Returns ------- fnames : list List of files that were created", "({}) \\nwith {}-{} ({}) \\nas the reference', 
'mapplot_basic': '{} for {}-{} ({}) with", "table for file names Parameters ---------- type: str type of plot \"\"\" #", "'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e:", "df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs )", "False True if TC. Then, caption starts with \"Other Data:\" Yield ----- df:", "@staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset part of the", "self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs)", "plots from results in a qa4sm output file. Parameters ---------- image : QA4SMImg", "for ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name))", "= 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in global for", "bool, optional (default: from globals) Add stats of median, iqr and N to", "return None # create plot fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci,", "or just return the image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames", "the file name for df, Var in self._yield_values(metric=metric): if not Var.is_CI: # concat", "or without extension) out_type : str, optional contains file extensions to be plotted.", "datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic')", "iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median()))", 
"str type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{}", "of output file out_types: str or list extensions which the files should be", "float offset of boxplots Var: QA4SMMetricVariable, optional. Default is None Specified in case", "else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric( self,", "if np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent key error when no", "the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if specified, otherwise", "metric type: str type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta", "other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table for plot", "= 'Other Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list:", "of plot Returns ------- parts: list list of parts for title \"\"\" parts", "if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax def", "with variable values and caption name Var: QA4SMMetricVariable variable corresponding to the dataframe", "extensions which the files should be saved in save_file: bool, optional. Default is", "df = pd.concat(dfs) # values are all Nan or NaNf - not plotted", "save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for all variables for a given", "file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, out_name:str=None,", "list list of file names with all the extensions \"\"\" fnames, values =", "plot with name to self.out_dir Parameters ---------- out_name: str name of output file", "box plots from results in a qa4sm output file. 
Parameters ---------- image :", "the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames: list list", "title def create_filename(self, Var, type:str) -> str: \"\"\" Create name of the file", "plt.close('all') return fnames else: return fig, ax def boxplot_tc( # todo: set limits", "the netCDF if ci: bounds = ci[id] else: bounds = ci # create", "self.create_filename(Var, type='boxplot_basic') # save or return plotting objects if save_files: fnames = self._save_plot(out_name,", "intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot (or triplet thereof)", "variables for a given metric in the loaded file. Parameters ---------- metric :", "loaded file. Parameters ---------- metric : str Name of a metric. File is", "of plot \"\"\" # we stick to old naming convention names = {'boxplot_basic':", "str: \"\"\" Create title of the plot Parameters ---------- Var: MetricVar variable for", "offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot", "range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds]", "same names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name", "saved in save_file: bool, optional. Default is False wether to save the file", "except: warn(\"The initialized QA4SMImg object has not been loaded. 'load_data' needs to be", "ds: pd.Series data on which stats are found med: bool iqr: bool count:", "combined into one plot. 
out_name: str name of output file out_types: str or", "mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values", "meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in", "def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots", "if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save", "---------- type: str type of plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{}", "of the box (axis) caption Parameters ---------- Var: MetricVar variable for a metric", "relative to the same dataset metric_tc, ci = {}, {} for df, Var", "Variables in the same dataframe values = pd.concat(values) # values are all Nan", "not Var: # when we only need reference dataset from variables (i.e. is", "kept; otherwise, it is saved as .png to self.out_dir Parameters ---------- out_name :", "we stick to old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc':", "a metric type: str type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type))", "= Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if", "plot_extent=None, # if None, extent is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs)", "True if TC. Then, caption starts with \"Other Data:\" Returns ------- capt: str", "# save or return plotting objects if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns)", "optional. 
Default is False wether to save the file in the output directory", "not in [None, False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str, out_types:str='png')", "-> Path: \"\"\"Use output path if specified, otherwise same directory as the one", "for {}-{} ({}) with {}-{} ({}) as the reference', 'mapplot_tc': '{} for {}-{}", "is not in the lookup table\".format(type) warn(message) @staticmethod # todo: cange file names", "have a shorter caption if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return", "statistics Returns ------- stats: str caption with summary stats \"\"\" # interquartile range", "were created \"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all()", "self.out_dir Parameters ---------- out_name : str output filename (with or without extension) out_type", "qa4sm_reader.plot_utils import * from warnings import warn class QA4SMPlotter(): \"\"\" Class to create", "boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a", "for all variables for a given metric in the loaded file. Parameters ----------", "[ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys():", "variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss):", "self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names = Var.metric_ds bounds = [] for", "tc: bool, default is False True if TC. 
Then, caption starts with \"Other", "ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname,", "Specified in case mds meta is needed \"\"\" # plot label parts =", "variable doesn't have CIs bounds = pd.concat(bounds, axis=1) # get the mean CI", "# when we only need reference dataset from variables (i.e. is the same):", "Plots values to a map, using the values as color. Plots a scatterplot", "function Returns ------- fnames: list list of file names with all the extensions", "df: pd.DataFrame dataframe to plot type: str one of _titles_lut ci: dict Dict", "box_cap_ds = self._box_caption(Var, tc=tc) # setting in global for caption stats if globals.boxplot_printnumbers:", "label = \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width * (len(df.columns) + 1)", "Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'],", "plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot", "of parts for title \"\"\" parts = [] ref, mds, other = [meta", "out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create", "which the files should be saved in save_all: bool, optional. Default is True.", "to the box bottom. tc: bool, default is False True if TC. 
Then,", "-> Path: \"\"\" Standardized behaviour for filenames: if provided name has extension, it", "it for the file name for df, Var in self._yield_values(metric=metric): if not Var.is_CI:", "the one storing the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) # use", "bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI", "if np.isnan(values.to_numpy()).all(): return None # create plot fig, ax = self._boxplot_definition( metric=metric, df=values,", "make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True,", "necessary if statement to prevent key error when no CIs are in the", ") if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or return plotting", "Var=None, **kwargs ) -> tuple: \"\"\" Define parameters of plot Parameters ---------- df:", "in self._yield_values(metric=metric): if not Var.is_CI: # concat upper and lower CI bounds of", "bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs )", "is saved as .png to self.out_dir Parameters ---------- out_name : str output filename", "import Path import seaborn as sns import pandas as pd from qa4sm_reader.img import", "Var if Var.g == 0: box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var,", "isinstance(out_types, str): out_types = [out_types] for ext in out_types: fname = self._standard_filename(out_name, out_type=ext)", "Parameters ---------- out_name : str output filename (with or without extension) out_type :", "{:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var,", "**plotting_kwargs ) if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or 
return", "if None, extent is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) # title", "---------- image : QA4SMImg The results object. out_dir : str, optional (default: None)", "---------- metric : str metric that is collected from the file for all", "netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) # use directory if specified if", "if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or return plotting objects", "of plot Parameters ---------- df: pd.DataFrame dataframe to plot type: str one of", "bounds of Variable, if present bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric):", "current working directory. \"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref", "parts = [] ref, mds, other = [meta for meta in Var.get_varmeta()] if", "the file for all datasets and combined into one plot. out_name: str name", "for_map=True, offset=0.04) # save file or just return the image if save_files: fnames", "created \"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or", "files should be saved in save_all: bool, optional. Default is True. 
all plotted", "for a metric type: str type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta,", "yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs", "if id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id in metric_tc.keys():", "str or list extensions which the files should be saved in save_all: bool,", "in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files,", "other_meta = Var.get_varmeta() ds_parts = [] id, meta = mds_meta if tc: id,", "def __init__(self, image, out_dir:str=None): \"\"\" Create box plots from results in a qa4sm", "get the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean())", "files that were created \"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}): if", "elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric,", "_box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric part with stats", "self._yield_values(metric=metric): # make sure they refer to the right variable if ci_Var.is_CI and", "meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if", "= self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None,", "for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None,", "cange file names and convention in qa4sm def _filenames_lut(type:str) -> str: \"\"\" 
Lookup", "Variable, if present bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make", "= other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc:", "iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod", "bool iqr: bool count: bool statistics Returns ------- stats: str caption with summary", "return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for title", "output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames : list List", "type of plot Returns ------- parts: list list of parts for title \"\"\"", "return fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list:", "df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define parameters of", "== 0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif", "common metrics have a shorter caption if globals.watermark_pos not in [None, False]: make_watermark(fig,", "parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']])", "metric_tc.items(): dfs, Var = values df = pd.concat(dfs) # values are all Nan", "file Parameters ---------- Var: MetricVar variable for a metric type: str type of", "label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot figwidth", "ci: bounds = ci[id] 
else: bounds = ci # create plot fig, ax", "opposed to img.extent) **plotting_kwargs) # title and plot settings depend on the metric", "parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name", "Path: \"\"\"Use output path if specified, otherwise same directory as the one storing", "error when no CIs are in the netCDF if ci: bounds = ci[id]", "\"Other Data:\" Returns ------- capt: str box caption \"\"\" ref_meta, mds_meta, other_meta =", "out_path.suffix: if out_type[0] != '.': out_type = '.' + out_type out_path = out_path.with_suffix(out_type)", "objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig,", "naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'],", "var : QA4SMMetricVariab;e Var in the image to make the map for. out_name:", "med: bool iqr: bool count: bool statistics Returns ------- stats: str caption with", "tuple: \"\"\" Define parameters of plot Parameters ---------- df: pd.DataFrame dataframe to plot", "if provided name has extension, it is kept; otherwise, it is saved as", "returns Matplotlib fig and ax objects for further processing. Parameters ---------- metric :", "return fnames else: return fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs", "object. out_dir : str, optional (default: None) Path to output generated plot. 
If", "fig, ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot with name", "Nan or NaNf - not plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if", "\"\"\" fnames = [] if isinstance(out_types, str): out_types = [out_types] for ext in", "with summary stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25]))", "the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values =", "save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax def mapplot_metric(", "# values are all Nan or NaNf - not plotted else: continue if", "of the metric out_types: str or list extensions which the files should be", "with \"Other Data:\" Returns ------- capt: str box caption \"\"\" ref_meta, mds_meta, other_meta", "as the one storing the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) #", "or return plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames", "reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the", "plot. If None, defaults to the current working directory. \"\"\" self.img = image", "# get the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range =", "plot Parameters ---------- df: pd.DataFrame dataframe to plot type: str one of _titles_lut", "None # create plot fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs", "when we only need reference dataset from variables (i.e. 
is the same): for", "Parameters ---------- Var: MetricVar variable for a metric type: str type of plot", "\" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables in the same dataframe", "confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates", "Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return", "out_types = [out_types] for ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if fname.exists():", "def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric part with", "image to make the map for. out_name: str name of output file out_types:", "use directory if specified if not out_dir.exists(): out_dir.mkdir() # make if not existing", "plotting_kwargs: arguments for mapplot function. 
\"\"\" Metric = self.img.metrics[metric] if Metric.g == 0", "(with or without extension) out_type : str, optional contains file extensions to be", "df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id", "\"\"\"Use output path if specified, otherwise same directory as the one storing the", "the same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var,", "stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str =", "dpi=globals.dpi ) if not Var: # when we only need reference dataset from", "and mapplot for a certain metric Parameters ---------- metric: str name of the", "from globals) Add stats of median, iqr and N to the box bottom.", "fetch parts of the name for the variable if not type in [\"mapplot_tc\",", "if isinstance(out_types, str): out_types = [out_types] for ext in out_types: fname = self._standard_filename(out_name,", "processing. Parameters ---------- metric : str metric that is collected from the file", "adjusted (as opposed to img.extent) **plotting_kwargs) # title and plot settings depend on", "the reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({}) and {}-{} ({})", "for further processing. 
Parameters ---------- metric : str metric that is collected from", "box_cap = box_cap_ds df = values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str,", "\"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the reference',", "[] # we take the last iterated value for Var and use it", "import * from warnings import warn class QA4SMPlotter(): \"\"\" Class to create image", "= name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable", "pandas as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils", "plot type: str one of _titles_lut ci: dict Dict of dataframes with the", "---------- metric : str Name of a metric. File is searched for variables", "initialized QA4SMImg object has not been loaded. 'load_data' needs to be \" \"set", "a metric type: str type of plot Returns ------- parts: list list of", "refer to the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and", "boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var: # when we", "-> str: \"\"\" Lookup table for file names Parameters ---------- type: str type", "0: box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in", "avoid same names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name =", "the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize =", "'{}' is not in the lookup table\".format(type) warn(message) @staticmethod # todo: cange file", "fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\"", "fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var:", "a figure and returns Matplotlib fig and ax objects for further processing. 
Parameters", "self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add", "------- fnames: list list of file names with all the extensions \"\"\" fnames", "continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric( self, metric:str,", "type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return", "mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for all", "in the lookup table\".format(type) warn(message) @staticmethod # todo: cange file names and convention", "each boxplot (or triplet thereof) offset: float offset of boxplots Var: QA4SMMetricVariable, optional.", "parts of the name for the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]:", "None, extent is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) # title and", "def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with pandas dataframes for", "metric_tc, ci = {}, {} for df, Var in self._yield_values(metric=metric, tc=True): if not", "return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list:", "file \"\"\" out_name = Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name) #", "(ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that", "is sutomatically adjusted (as opposed to img.extent) **plotting_kwargs) # title and plot settings", "out_types: str or list extensions which the files should be saved in save_all:", "'.' 
+ out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True,", "\"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) #", "dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values = Var.values[Var.varname]", "['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc':", "create image files of plots from the validation results in a QA4SMImage \"\"\"", "title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g ==", "save file or just return the image if save_files: fnames = self._save_plot(out_name, out_types=out_types)", "right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss ==", "# use title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in", "type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() #", "type='boxplot_basic') -> list: \"\"\" Create parts for title according to the type of", "if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) #", "MetricVar variable for a metric type: str type of plot Returns ------- parts:", "for ISMN and a image plot for other input values. 
Parameters ---------- var", "metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for all variables for", "Var.g == 0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common')", "mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id, meta = mds_meta if tc:", "parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if", "refer to the right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns =", "out_name = self.create_filename(Var, type='mapplot_tc') # use title for plot, make watermark ax.set_title(title, pad=globals.title_pad)", "list extensions which the files should be saved in save_all: bool, optional. Default", "extension) out_type : str, optional contains file extensions to be plotted. If None,", "\\nas the reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({}) as the", "the extensions \"\"\" fnames = [] if isinstance(out_types, str): out_types = [out_types] for", "one plot. 
out_name: str name of output file out_types: str or list extensions", "Data:\" Returns ------- capt: str box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta()", "= self._box_caption(Var, tc=tc) # setting in global for caption stats if globals.boxplot_printnumbers: box_stats", "all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title = self.create_title(Var=Var,", "the box (axis) caption Parameters ---------- Var: MetricVar variable for a metric tc:", "metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically adjusted (as opposed", "self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames", "------- parts: list list of parts for title \"\"\" parts = [] ref,", "= \"type '{}' is not in the lookup table\".format(type) warn(message) @staticmethod # todo:", "\\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be", "else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in global for caption stats if", "pd.DataFrame dataframe with variable values and caption name Var: QA4SMMetricVariable variable corresponding to", "\\nfor {}-{} ({}) \\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic': '{} for {}-{}", "== 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'],", "path of the file \"\"\" out_name = Path(out_name) # provide output directory out_path", "bottom. tc: bool, default is False True if TC. 
Then, caption starts with", "has extension, it is kept; otherwise, it is saved as .png to self.out_dir", "variable corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in", "output directory out_path = self.out_dir.joinpath(out_name) # provide output file type if not out_path.suffix:", "parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) ->", "list: \"\"\" Save plot with name to self.out_dir Parameters ---------- out_name: str name", "in the initialization of the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output", "to 'True' in the initialization of the Image.\") def get_dir(self, out_dir:str) -> Path:", "Define parameters of plot Parameters ---------- df: pd.DataFrame dataframe to plot type: str", "= self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title for plot, make", "QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var", "bool, optional. Default is False wether to save the file in the output", "# group Vars and CIs relative to the same dataset metric_tc, ci =", "fnames: list list of file names with all the extensions \"\"\" fnames, values", "plot. out_name: str name of output file out_types: str or list extensions which", "ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically adjusted (as opposed to", "# provide output file type if not out_path.suffix: if out_type[0] != '.': out_type", "working directory. 
\"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try:", "upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot (or", "out_name: str name of output file out_types: str or list extensions which the", "one storing the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) # use directory", "= out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\"", "= [df], Var for id, values in metric_tc.items(): dfs, Var = values df", "list of parts for title \"\"\" parts = [] ref, mds, other =", "of _titles_lut ci: dict Dict of dataframes with the lower and upper confidence", "if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic(", "be \" \"set to 'True' in the initialization of the Image.\") def get_dir(self,", "in the output directory plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames: list", "of files that were created \"\"\" fnames = [] for Var in self.img._iter_vars(**{'metric':metric}):", "Parameters ---------- metric: str metric name add_stats : bool, optional (default: from globals)", "parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0])", "from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from", "plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos,", "str name of output file out_types: str or list extensions which the files", "figsize = [figwidth, 
globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi", "break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset", "add watermark if self.img.has_CIs: offset = 0.06 # offset smaller as CI variables", "def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames: if provided", "and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot", "tuple: \"\"\" Plot and save boxplot and mapplot for a certain metric Parameters", "If None, defaults to the current working directory. \"\"\" self.img = image self.out_dir", "# todo: cange file names and convention in qa4sm def _filenames_lut(type:str) -> str:", "iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric part with stats of the", "title and plot settings depend on the metric group if Var.g == 0:", "if out_type[0] != '.': out_type = '.' + out_type out_path = out_path.with_suffix(out_type) return", "[] # group Vars and CIs relative to the same dataset metric_tc, ci", "---------- out_name: str name of output file out_types: str or list extensions which", "str output filename (with or without extension) out_type : str, optional contains file", "if Metric.g == 0 or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all,", "the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) # use directory if specified", "\"\"\" Create box plots from results in a qa4sm output file. 
Parameters ----------", "capt: str box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = []", "is needed \"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label =", "larger caption if Var.g == 0: offset = 0.02 # offset larger as", "that variable doesn't have CIs bounds = pd.concat(bounds, axis=1) # get the mean", "\"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except:", "Standardized behaviour for filenames: if provided name has extension, it is kept; otherwise,", "Path import seaborn as sns import pandas as pd from qa4sm_reader.img import QA4SMImg", "dataset from variables (i.e. is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var =", "str box caption \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() ds_parts = [] id,", "self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object", "ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize,", "# fetch parts of the name for the variable if not type in", "TC. 
Then, caption starts with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe with", "a metric to plot Parameters ---------- metric: str metric name add_stats : bool,", "provide output file type if not out_path.suffix: if out_type[0] != '.': out_type =", "# necessary if statement to prevent key error when no CIs are in", "metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name = self.create_filename(Var, type='boxplot_basic')", "Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable doesn't", "the image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return fig,", "Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad)", "offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define parameters of plot Parameters ----------", "save_file: bool, optional. Default is False wether to save the file in the", "type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save", "if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats", "plotted images are saved to the output directory plotting_kwargs: arguments for mapplot function.", "True. all plotted images are saved to the output directory plotting_kwargs: arguments for", "save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for TC metrics. Saves", "img.extent) **plotting_kwargs) # title and plot settings depend on the metric group if", "CIs are in the netCDF if ci: bounds = ci[id] else: bounds =", "globals) Add stats of median, iqr and N to the box bottom. 
tc:", "is False wether to save the file in the output directory plotting_kwargs: arguments", "\"\"\" out_name = Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name) # provide", "ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name", "\\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{}", "for plot titles Parameters ---------- type: str type of plot \"\"\" titles =", "to create image files of plots from the validation results in a QA4SMImage", "Below workaround to avoid same names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc')", "otherwise, it is saved as .png to self.out_dir Parameters ---------- out_name : str", "**plotting_kwargs) elif Metric.g == 3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot =", "plot \"\"\" # we stick to old naming convention names = {'boxplot_basic': 'boxplot_{}',", "not Var.is_CI: id, names = Var.metric_ds bounds = [] for ci_df, ci_Var in", "the right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df)", "# make sure they refer to the right variable if ci_Var.is_CI and (ci_Var.metric_ds", "or list extensions which the files should be saved in save_all: bool, optional.", "= Path(out_dir) # use directory if specified if not out_dir.exists(): out_dir.mkdir() # make", "in save_all: bool, optional. Default is True. all plotted images are saved to", "== Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds:", "metric tc: bool, default is False True if TC. 
Then, caption starts with", "the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns", "+ 1) figsize = [figwidth, globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label,", "parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup table", "of the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if specified,", "has not been loaded. 'load_data' needs to be \" \"set to 'True' in", "plot Returns ------- parts: list list of parts for title \"\"\" parts =", "values.append(df) # put all Variables in the same dataframe values = pd.concat(values) #", "pd.DataFrame dataframe to plot type: str one of _titles_lut ci: dict Dict of", "self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var( self, Var, out_name:str=None,", "str type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts)", "save. Below workaround to avoid same names if not out_name: save_name = self.create_filename(Var,", "same dataframe values = pd.concat(values) # values are all Nan or NaNf -", "# we stick to old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}',", "---------- var : QA4SMMetricVariab;e Var in the image to make the map for.", "from variables (i.e. is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var", "for a certain metric Parameters ---------- metric: str name of the metric out_types:", "**plotting_kwargs ) # save. 
Below workaround to avoid same names if not out_name:", "saved in Returns ------- fnames: list list of file names with all the", "capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for title according", "results in a qa4sm output file. Parameters ---------- image : QA4SMImg The results", "reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({}) and {}-{} ({}) as", "Metric = self.img.metrics[metric] if Metric.g == 0 or Metric.g == 2: fnames_bplot =", "Yield ----- df: pd.DataFrame dataframe with variable values and caption name Var: QA4SMMetricVariable", "in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id, values in metric_tc.items():", "Matplotlib fig and ax objects for further processing. Parameters ---------- metric : str", "[] ci = [] # we take the last iterated value for Var", "if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']:", "= self.out_dir.joinpath(out_name) # provide output file type if not out_path.suffix: if out_type[0] !=", "= self.create_filename(Var, type='boxplot_basic') # save or return plotting objects if save_files: fnames =", "Var, type:str) -> str: \"\"\" Create name of the file Parameters ---------- Var:", "= self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save or return plotting objects", "references'} try: return titles[type] except KeyError as e: message = \"type '{}' is", "self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for all variables", "= ci[id] else: bounds = ci # create plot fig, ax = self._boxplot_definition(", "values and caption name Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars", "for mapplot function. 
\"\"\" Metric = self.img.metrics[metric] if Metric.g == 0 or Metric.g", "else: return fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) ->", "= [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else: parts = [ref_meta[0], ref_meta[1]['short_name']] if type", "CI range:\" \" {:.3g}\".format(ci_range) ] ci.append(bounds) values.append(df) # put all Variables in the", "type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type", "with the lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption", "out_type = '.' + out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod def _box_stats(ds:pd.Series,", "return name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with pandas", "elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if", "lookup table\".format(type) warn(message) def create_title(self, Var, type:str) -> str: \"\"\" Create title of", "otherwise same directory as the one storing the netCDF file\"\"\" if out_dir: out_dir", "the lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to", "list: \"\"\" Creates a boxplot for TC metrics. Saves a figure and returns", "output file type if not out_path.suffix: if out_type[0] != '.': out_type = '.'", "results object. 
out_dir : str, optional (default: None) Path to output generated plot.", "type:str) -> str: \"\"\" Create title of the plot Parameters ---------- Var: MetricVar", "plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) #", "reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({}) as the reference', 'mapplot_tc':", "a metric. File is searched for variables for that metric. out_name: str name", "= [] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns =", "'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as e: message = \"type '{}'", "with all the extensions \"\"\" fnames, values = [], [] ci = []", "parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) -> str:", "for variables for that metric. out_name: str name of output file out_types: str", "for common and double metrics. Saves a figure and returns Matplotlib fig and", "str type of plot Returns ------- parts: list list of parts for title", "pd.concat(values) # values are all Nan or NaNf - not plotted if np.isnan(values.to_numpy()).all():", "{}, {} for df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names", "parts: list list of parts for title \"\"\" parts = [] ref, mds,", "the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta:", "# put all Variables in the same dataframe values = pd.concat(values) # values", "metric : str Name of a metric. File is searched for variables for", "\"\"\" Define parameters of plot Parameters ---------- df: pd.DataFrame dataframe to plot type:", "arguments for mapplot function. 
\"\"\" Metric = self.img.metrics[metric] if Metric.g == 0 or", "id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id, values in", "title \"\"\" parts = [] ref, mds, other = [meta for meta in", "image : QA4SMImg The results object. out_dir : str, optional (default: None) Path", "to be \" \"set to 'True' in the initialization of the Image.\") def", "id, values in metric_tc.items(): dfs, Var = values df = pd.concat(dfs) # values", "Get iterable with pandas dataframes for all variables of a metric to plot", "pd.concat(bounds, axis=1) # get the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"]", "+ \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else:", "part with stats of the box (axis) caption Parameters ---------- ds: pd.Series data", "the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss", "values as color. Plots a scatterplot for ISMN and a image plot for", "watermark if self.img.has_CIs: offset = 0.06 # offset smaller as CI variables have", "df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds)", "-> list: \"\"\" Creates a boxplot for common and double metrics. 
Saves a", "image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax", "({}) with {}-{} ({}) and {}-{} ({}) as the references'} try: return titles[type]", "self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) -> str: \"\"\" Create name of", "statement to prevent key error when no CIs are in the netCDF if", "of plot Parameters ---------- Var: MetricVar variable for a metric type: str type", "Data:\" Yield ----- df: pd.DataFrame dataframe with variable values and caption name Var:", "def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot with name to self.out_dir", "return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\"", "Var: QA4SMMetricVariable, optional. Default is None Specified in case mds meta is needed", "CI variables have a larger caption if Var.g == 0: offset = 0.02", "out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png',", "= '\\n and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' + capt return", "------- outname: pathlib.Path correct path of the file \"\"\" out_name = Path(out_name) #", "the map for. out_name: str name of output file out_types: str or list", "to output generated plot. If None, defaults to the current working directory. \"\"\"", "and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could", "image, out_dir:str=None): \"\"\" Create box plots from results in a qa4sm output file.", "parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'],", "loaded. 
'load_data' needs to be \" \"set to 'True' in the initialization of", "dataframes with the lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]} xticks: list", "of file names with all the extensions \"\"\" fnames = [] # group", "save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and save boxplot and mapplot for", "wether to save the file in the output directory plotting_kwargs: arguments for mapplot", "{}-{} ({}) \\nas the reference', 'mapplot_basic': '{} for {}-{} ({}) with {}-{} ({})", "def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and", "all Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if", "should be saved in save_file: bool, optional. Default is False wether to save", "concat upper and lower CI bounds of Variable, if present bounds = []", "-> str: \"\"\" Create title of the plot Parameters ---------- Var: MetricVar variable", "== 0: box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting", "Creates a boxplot for TC metrics. Saves a figure and returns Matplotlib fig", "* (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig, ax = boxplot( df=df,", "name add_stats : bool, optional (default: from globals) Add stats of median, iqr", "contains file extensions to be plotted. If None, uses 'png' Returns ------- outname:", "variable for a metric tc: bool, default is False True if TC. Then,", "def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset part of the box", "same dataset metric_tc, ci = {}, {} for df, Var in self._yield_values(metric=metric, tc=True):", "metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname],", "all variables for a given metric in the loaded file. 
Parameters ---------- metric", "self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object has not", "metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define parameters", "and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be", "offset larger as common metrics have a shorter caption if globals.watermark_pos not in", "+ capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts", "fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self,", "save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot for common and double", "extensions to be plotted. If None, uses 'png' Returns ------- outname: pathlib.Path correct", "just return the image if save_files: fnames = self._save_plot(out_name, out_types=out_types) return fnames else:", "initialization of the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if", "metric Parameters ---------- metric: str name of the metric out_types: str or list", "{}-{} ({}) \\nwith {}-{} ({}) \\nas the reference', 'mapplot_basic': '{} for {}-{} ({})", "as e: message = \"type '{}' is not in the lookup table\".format(type) warn(message)", "directory if specified if not out_dir.exists(): out_dir.mkdir() # make if not existing else:", "validation results in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box", "= [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they refer to", "out_types: str or list extensions which the files should be saved in Returns", "to make the map for. 
out_name: str name of output file out_types: str", "met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str)", "---------- type: str type of plot \"\"\" # we stick to old naming", "= Var.metric_ds bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure", "= '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the", "into one plot. out_name: str name of output file out_types: str or list", "for other input values. Parameters ---------- var : QA4SMMetricVariab;e Var in the image", "if save_files: return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs )", "metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with pandas dataframes for all variables", "\"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self,", "Var.values[Var.varname] # changes if it's a common-type Var if Var.g == 0: box_cap_ds", "shorter caption if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return fig, ax", "ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) #", "file. Parameters ---------- image : QA4SMImg The results object. 
out_dir : str, optional", "name has extension, it is kept; otherwise, it is saved as .png to", "if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self,", "the lookup table\".format(type) warn(message) def create_title(self, Var, type:str) -> str: \"\"\" Create title", "Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax =", "if not Var: # when we only need reference dataset from variables (i.e.", "Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement", "== Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable", "[CIs]} xticks: list caption to each boxplot (or triplet thereof) offset: float offset", "self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates a boxplot", "met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str:", "dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def", "file for all datasets and combined into one plot. 
out_name: str name of", "ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset = 0.06 # offset smaller", "Parameters ---------- metric : str metric that is collected from the file for", "self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield", "QA4SMPlotter(): \"\"\" Class to create image files of plots from the validation results", "if type == \"mapplot_tc\": # necessary to respect old naming convention for dss", "mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False)", "directory plotting_kwargs: arguments for mapplot function Returns ------- fnames : list List of", "is False True if TC. Then, caption starts with \"Other Data:\" Returns -------", "[] if isinstance(out_types, str): out_types = [out_types] for ext in out_types: fname =", "for the variable if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if", "_standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames: if provided name", "a image plot for other input values. 
Parameters ---------- var : QA4SMMetricVariab;e Var", "global for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats)", "caption starts with \"Other Data:\" Returns ------- capt: str box caption \"\"\" ref_meta,", "ci = {}, {} for df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI:", "save or return plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return", "to plot Parameters ---------- metric: str metric name add_stats : bool, optional (default:", "qa4sm def _filenames_lut(type:str) -> str: \"\"\" Lookup table for file names Parameters ----------", "type == \"mapplot_tc\": # necessary to respect old naming convention for dss in", "using the values as color. Plots a scatterplot for ISMN and a image", "range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0]", "of the file Parameters ---------- Var: MetricVar variable for a metric type: str", "self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The", "limits to show confidence intervals self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) ->", "function Returns ------- fnames : list List of files that were created \"\"\"", "\"Other Data:\" Yield ----- df: pd.DataFrame dataframe with variable values and caption name", "_box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset part of the box (axis)", "the file \"\"\" out_name = Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name)", "in case mds meta is needed \"\"\" # plot label parts = [globals._metric_name[metric]]", "bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they refer", "the dataset part of the box (axis) caption Parameters ---------- Var: MetricVar variable", "tc: 
capt = 'Other Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic')", "directory plotting_kwargs: arguments for mapplot function Returns ------- fnames: list list of file", "for all variables of a metric to plot Parameters ---------- metric: str metric", "df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. Below workaround to avoid same", "out_path = self.out_dir.joinpath(out_name) # provide output file type if not out_path.suffix: if out_type[0]", "between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title =", "if statement to prevent key error when no CIs are in the netCDF", "netCDF if ci: bounds = ci[id] else: bounds = ci # create plot", "optional. Default is None Specified in case mds meta is needed \"\"\" #", "= [] # we take the last iterated value for Var and use", "ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr:", "metric group if Var.g == 0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name", "-> str: \"\"\" Lookup table for plot titles Parameters ---------- type: str type", "metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. 
Below workaround to avoid", "out_dir = self.img.filepath.parent # use default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png')", "or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g ==", "respect old naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']])", "the extensions \"\"\" fnames = [] # group Vars and CIs relative to", "metric: str metric name add_stats : bool, optional (default: from globals) Add stats", "necessary to respect old naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric,", "# create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if", "according to the type of plot Parameters ---------- Var: MetricVar variable for a", "meta is needed \"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label", "caption if Var.g == 0: offset = 0.02 # offset larger as common", "str: \"\"\" Create name of the file Parameters ---------- Var: MetricVar variable for", "offset smaller as CI variables have a larger caption if Var.g == 0:", "image plot for other input values. 
Parameters ---------- var : QA4SMMetricVariab;e Var in", "-> tuple: \"\"\" Plot and save boxplot and mapplot for a certain metric", "values are all Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue #", "type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0])", "plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent key error when", "files should be saved in save_file: bool, optional. Default is False wether to", "in global for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds,", "list of file names with all the extensions \"\"\" fnames, values = [],", "directory plotting_kwargs: arguments for mapplot function. \"\"\" Metric = self.img.metrics[metric] if Metric.g ==", "type='mapplot_tc') # use title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not", "it is saved as .png to self.out_dir Parameters ---------- out_name : str output", "lower CI bounds of Variable, if present bounds = [] for ci_df, ci_Var", "default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour", "plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False,", "e: message = \"type '{}' is not in the lookup table\".format(type) warn(message) def", "need reference dataset from variables (i.e. 
is the same): for Var in self.img._iter_vars(**{'metric':metric}):", "= [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for", "'{}' is not in the lookup table\".format(type) warn(message) def create_title(self, Var, type:str) ->", "get the mean CI range diff = bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean()", "Lookup table for plot titles Parameters ---------- type: str type of plot \"\"\"", "Var.metric_ds bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they", "str Name of a metric. File is searched for variables for that metric.", "0 or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif Metric.g", "Var=Var, **plotting_kwargs ) # save. Below workaround to avoid same names if not", "2: title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var,", "save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig, ax def", "list of file names with all the extensions \"\"\" ref_meta, mds_meta, other_meta =", "'png' Returns ------- outname: pathlib.Path correct path of the file \"\"\" out_name =", "out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames: if", "to the same dataset metric_tc, ci = {}, {} for df, Var in", "file in the output directory plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames:", "filenames: if provided name has extension, it is kept; otherwise, it is saved", "plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and save", "fnames = [] if isinstance(out_types, str): out_types = [out_types] for ext in out_types:", "objects for further processing. 
Parameters ---------- metric : str metric that is collected", "metrics. Saves a figure and returns Matplotlib fig and ax objects for further", "class QA4SMPlotter(): \"\"\" Class to create image files of plots from the validation", "titles[type] except KeyError as e: message = \"type '{}' is not in the", "storing the netCDF file\"\"\" if out_dir: out_dir = Path(out_dir) # use directory if", "the values as color. Plots a scatterplot for ISMN and a image plot", "if Var.g == 0: title = \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var,", "globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df", "or list extensions which the files should be saved in save_file: bool, optional.", "are found med: bool iqr: bool count: bool statistics Returns ------- stats: str", "out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric", "mapplot function. \"\"\" Metric = self.img.metrics[metric] if Metric.g == 0 or Metric.g ==", "the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames : list", "offset = 0.06 # offset smaller as CI variables have a larger caption", "file names with all the extensions \"\"\" fnames = [] if isinstance(out_types, str):", "a boxplot for common and double metrics. 
Saves a figure and returns Matplotlib", "iqr = abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR:", "self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) return fnames_bplot, fnames_mapplot", "[] for Var in self.img._iter_vars(**{'metric':metric}): if not (np.isnan(Var.values.to_numpy()).all() or Var.is_CI): fns = self.mapplot_var(Var,", "Var in enumerate(Vars): values = Var.values[Var.varname] # changes if it's a common-type Var", "not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']]) else:", "map for. out_name: str name of output file out_types: str or list extensions", "for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else:", "all the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize", "to avoid same names if not out_name: save_name = self.create_filename(Var, type='boxplot_tc') else: save_name", "path if specified, otherwise same directory as the one storing the netCDF file\"\"\"", "values are all Nan or NaNf - not plotted else: continue if save_files:", "out_name = self.create_filename(Var, type='boxplot_basic') # save or return plotting objects if save_files: fnames", "== 0 or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) elif", "not been loaded. 
'load_data' needs to be \" \"set to 'True' in the", "-*- from pathlib import Path import seaborn as sns import pandas as pd", "\"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize #", "and save boxplot and mapplot for a certain metric Parameters ---------- metric: str", "Var.get_varmeta() # fetch parts of the name for the variable if not type", "other = [meta for meta in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'],", "names = Var.metric_ds bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): # make", "and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' + capt return capt @staticmethod", "values = Var.values[Var.varname] # changes if it's a common-type Var if Var.g ==", "import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from warnings import warn class", "out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan or NaNf - not plotted", "stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset part of", "not Var.is_CI: # concat upper and lower CI bounds of Variable, if present", "in self._yield_values(metric=metric): # make sure they refer to the right variable if ci_Var.is_CI", "out_dir = Path(out_dir) # use directory if specified if not out_dir.exists(): out_dir.mkdir() #", "when no CIs are in the netCDF if ci: bounds = ci[id] else:", "save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var(", "name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with pandas dataframes", "= abs(float(iqr.loc[0.25])) met_str = [] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr))", "n, Var in enumerate(Vars): values = Var.values[Var.varname] # changes if it's a 
common-type", "= self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name =", "else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield df, Var def _boxplot_definition( self,", "image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object has not been loaded. 'load_data'", "make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file or just return the image if", "= pd.concat(bounds, axis=1) # get the mean CI range diff = bounds[\"upper\"] -", "self, Var, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Plots values to", "values in metric_tc.items(): dfs, Var = values df = pd.concat(dfs) # values are", "variables of a metric to plot Parameters ---------- metric: str metric name add_stats", "plot fig, ax = self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not", "out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or return plotting objects if save_files:", "if not out_dir.exists(): out_dir.mkdir() # make if not existing else: out_dir = self.img.filepath.parent", "**plotting_kwargs) # values are all Nan or NaNf - not plotted else: continue", "values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None,", "part of the box (axis) caption Parameters ---------- Var: MetricVar variable for a", "= self._standard_filename(out_name, out_type=ext) if fname.exists(): warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return", "not plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def", "optional (default: from globals) Add stats of median, iqr and N to the", "globals._metric_units[self.ref['short_name']])) label 
= \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width * (len(df.columns) +", "triplet thereof) offset: float offset of boxplots Var: QA4SMMetricVariable, optional. Default is None", "import warn class QA4SMPlotter(): \"\"\" Class to create image files of plots from", "if it's a common-type Var if Var.g == 0: box_cap_ds = 'All datasets'", "box bottom. tc: bool, default is False True if TC. Then, caption starts", "dataframe values = pd.concat(values) # values are all Nan or NaNf - not", "({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{} ({}) \\nwith {}-{}", "as CI variables have a larger caption if Var.g == 0: offset =", "True if TC. Then, caption starts with \"Other Data:\" Yield ----- df: pd.DataFrame", "if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False)", "return names[type] except KeyError as e: message = \"type '{}' is not in", "if globals.boxplot_printnumbers: box_stats = self._box_stats(values) box_cap = '{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds", "dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs", "= self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var, type:str) -> str: \"\"\" Create name", "the metric part with stats of the box (axis) caption Parameters ---------- ds:", "the output directory plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames: list list", "parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts) return name def _yield_values(self,", "file in the output directory plotting_kwargs: arguments for mapplot function Returns ------- fnames", "mean CI range diff = 
bounds[\"upper\"] - bounds[\"lower\"] ci_range = float(diff.mean()) df.columns =", "\\n{} \\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{} \\nfor {}-{}", "import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from warnings import", "] if id in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id in", "({}) as the reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({}) and", "as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import", "Var: MetricVar variable for a metric tc: bool, default is False True if", "df: pd.DataFrame dataframe with variable values and caption name Var: QA4SMMetricVariable variable corresponding", "value for Var and use it for the file name for df, Var", "is None Specified in case mds meta is needed \"\"\" # plot label", "title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset =", "the loaded file. Parameters ---------- metric : str Name of a metric. File", "def create_filename(self, Var, type:str) -> str: \"\"\" Create name of the file Parameters", "- not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent key", "name of the file Parameters ---------- Var: MetricVar variable for a metric type:", "ax def _save_plot(self, out_name:str, out_types:str='png') -> list: \"\"\" Save plot with name to", "Parameters ---------- out_name: str name of output file out_types: str or list extensions", "metric out_types: str or list extensions which the files should be saved in", "Dict of dataframes with the lower and upper confidence intervals shape: {\"upper\"/\"lower\": [CIs]}", "table for plot titles Parameters ---------- type: str type of plot \"\"\" titles", "bounds: # could be that variable doesn't have CIs bounds = pd.concat(bounds, axis=1)", "output generated plot. 
If None, defaults to the current working directory. \"\"\" self.img", "title = self.create_title(Var=Var, type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc') # use title for plot,", "def _titles_lut(type:str) -> str: \"\"\" Lookup table for plot titles Parameters ---------- type:", "for filenames: if provided name has extension, it is kept; otherwise, it is", "the current working directory. \"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref =", "[ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable doesn't have CIs bounds", "in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name = name.format(*parts)", "'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except KeyError as", "\"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize =", "str or list extensions which the files should be saved in Returns -------", "the initialization of the Image.\") def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path", "_get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create parts for title according to the type", "convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric])", "globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig, ax = boxplot(", "out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for all variables for a", "QA4SMImg import qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from warnings import warn", "for 
plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]: make_watermark(fig,", "or return plotting objects if save_files: fns = self._save_plot(save_name, out_types=out_types) fnames.extend(fns) plt.close('all') if", "= Var.get_varmeta() ds_parts = [] id, meta = mds_meta if tc: id, meta", "-> list: \"\"\" Save plot with name to self.out_dir Parameters ---------- out_name: str", "# save file or just return the image if save_files: fnames = self._save_plot(out_name,", "ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. Below", "key error when no CIs are in the netCDF if ci: bounds =", "fig and ax objects for further processing. Parameters ---------- metric : str metric", "QA4SMImg object has not been loaded. 'load_data' needs to be \" \"set to", "id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts) if tc: capt = 'Other", "= Var.values[Var.varname] # changes if it's a common-type Var if Var.g == 0:", "for _boxplot_definition function Returns ------- fnames: list list of file names with all", "a shorter caption if globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return fig,", "self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig, ax def boxplot_tc( # todo:", "fnames = self._save_plot(out_name, out_types=out_types) return fnames else: return fig, ax def mapplot_metric( self,", "mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically adjusted (as", "med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats =", "out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, 
type='mapplot_tc') out_name = self.create_filename(Var, type='mapplot_tc')", "for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']]) parts.extend([Var.metric, mds_meta[0], mds_meta[1]['short_name']]) parts.extend([mds_meta[0], mds_meta[1]['short_name'], Var.metric]) name", "- not plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames", "globals.boxplot_height] fig, ax = boxplot( df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not", "if self.img.has_CIs: offset = 0.06 # offset smaller as CI variables have a", "---------- Var: MetricVar variable for a metric type: str type of plot \"\"\"", "= bounds[\"upper\"] - bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0] + \"\\nMean", "files of plots from the validation results in a QA4SMImage \"\"\" def __init__(self,", "False True if TC. Then, caption starts with \"Other Data:\" Returns ------- capt:", "a qa4sm output file. Parameters ---------- image : QA4SMImg The results object. out_dir", "could be that variable doesn't have CIs bounds = pd.concat(bounds, axis=1) # get", "metric. out_name: str name of output file out_types: str or list extensions which", "if specified, otherwise same directory as the one storing the netCDF file\"\"\" if", "self.img.metrics[metric] if Metric.g == 0 or Metric.g == 2: fnames_bplot = self.boxplot_basic(metric=metric, out_types=out_types,", "False wether to save the file in the output directory plotting_kwargs: arguments for", "make the map for. 
out_name: str name of output file out_types: str or", "Var for id, values in metric_tc.items(): dfs, Var = values df = pd.concat(dfs)", "= mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically adjusted", "self.img.ref_dataset_grid_stepsize # create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, #", "Default is True. all plotted images are saved to the output directory plotting_kwargs:", "Var and use it for the file name for df, Var in self._yield_values(metric=metric):", "Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: # could be that variable doesn't", "= self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan or NaNf", "fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save.", "caption Parameters ---------- ds: pd.Series data on which stats are found med: bool", "in the image to make the map for. out_name: str name of output", "is not in the lookup table\".format(type) warn(message) def create_title(self, Var, type:str) -> str:", "ref, mds, other = [meta for meta in Var.get_varmeta()] if type == 'boxplot_basic':", "or NaNf - not plotted if np.isnan(values.to_numpy()).all(): return None # create plot fig,", "Default is None Specified in case mds meta is needed \"\"\" # plot", "further processing. 
Parameters ---------- metric : str metric that is collected from the", "# plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate", "df, Var in self._yield_values(metric=metric): if not Var.is_CI: # concat upper and lower CI", "ax objects for further processing. Parameters ---------- metric : str metric that is", "on the metric group if Var.g == 0: title = \"{} between all", "str): out_types = [out_types] for ext in out_types: fname = self._standard_filename(out_name, out_type=ext) if", "it's a common-type Var if Var.g == 0: box_cap_ds = 'All datasets' else:", "names with all the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric =", "to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars): values", "mds[1]['pretty_version']]) parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts", "'.': out_type = '.' 
+ out_type out_path = out_path.with_suffix(out_type) return out_path @staticmethod def", "'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of \\n{}", "= [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ] if id in", "for n, Var in enumerate(Vars): values = Var.values[Var.varname] # changes if it's a", "we take the last iterated value for Var and use it for the", "_boxplot_definition function Returns ------- fnames: list list of file names with all the", "plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames: list list of file names", "CI bounds of Variable, if present bounds = [] for ci_df, ci_Var in", "name of the metric out_types: str or list extensions which the files should", "warnings.warn('Overwriting file {}'.format(fname.name)) plt.savefig(fname, dpi='figure', bbox_inches='tight') fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str,", "= self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs ) # save. Below workaround", "saved to the output directory plotting_kwargs: arguments for mapplot function. \"\"\" Metric =", "for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var, type=type) ax.set_title(title,", "object has not been loaded. 'load_data' needs to be \" \"set to 'True'", "the files should be saved in Returns ------- fnames: list list of file", "and lower CI bounds of Variable, if present bounds = [] for ci_df,", "for that metric. 
out_name: str name of output file out_types: str or list", "dataset metric_tc, ci = {}, {} for df, Var in self._yield_values(metric=metric, tc=True): if", "globals from qa4sm_reader.plot_utils import * from warnings import warn class QA4SMPlotter(): \"\"\" Class", "[None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file or just return the", "med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the metric part with stats of", "name of output file out_types: str or list extensions which the files should", "tuple: \"\"\" Get iterable with pandas dataframes for all variables of a metric", "try: return names[type] except KeyError as e: message = \"type '{}' is not", "type:str) -> str: \"\"\" Create name of the file Parameters ---------- Var: MetricVar", "Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: #", "the files should be saved in save_file: bool, optional. Default is False wether", "create plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var, **plotting_kwargs )", "return out_path @staticmethod def _box_stats(ds:pd.Series, med:bool=True, iqr:bool=True, count:bool=True) -> str: \"\"\" Create the", "if Var.g == 0: box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc)", "in [None, False]: make_watermark(fig, globals.watermark_pos, for_map=True, offset=0.04) # save file or just return", "= {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return", "'{} for {}-{} ({}) with {}-{} ({}) and {}-{} ({}) as the references'}", "in ci.keys(): ci[id].append(bounds) else: ci[id] = [bounds] if id in metric_tc.keys(): metric_tc[id][0].append(df) else:", "to plot type: str one of _titles_lut ci: dict Dict of 
dataframes with", "= {}, {} for df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id,", "out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name", "if not type in [\"mapplot_tc\", \"mapplot_double\"]: parts = [Var.metric] if mds_meta: parts.extend([mds_meta[0], mds_meta[1]['short_name']])", "\"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch parts of", "parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title = self._titles_lut(type=type).format(*parts) return title def create_filename(self, Var,", ") -> tuple: \"\"\" Define parameters of plot Parameters ---------- df: pd.DataFrame dataframe", "filename (with or without extension) out_type : str, optional contains file extensions to", ": QA4SMMetricVariab;e Var in the image to make the map for. out_name: str", "= diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ]", "capt = '\\n and \\n'.join(ds_parts) if tc: capt = 'Other Data:\\n' + capt", "stats of median, iqr and N to the box bottom. tc: bool, default", "datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in global for caption stats", "with name to self.out_dir Parameters ---------- out_name: str name of output file out_types:", "settings depend on the metric group if Var.g == 0: title = \"{}", "---------- df: pd.DataFrame dataframe to plot type: str one of _titles_lut ci: dict", "doesn't have CIs bounds = pd.concat(bounds, axis=1) # get the mean CI range", ") -> list: \"\"\" Creates a boxplot for TC metrics. Saves a figure", "'\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) -> str: \"\"\" Create the dataset", "File is searched for variables for that metric. 
out_name: str name of output", "of file names with all the extensions \"\"\" fnames = [] if isinstance(out_types,", "[] for ci_df, ci_Var in self._yield_values(metric=metric): # make sure they refer to the", "\"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff() iqr = abs(float(iqr.loc[0.25])) met_str = []", "in Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc',", "= self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else: return fig, ax def boxplot_tc( #", "median, iqr and N to the box bottom. tc: bool, default is False", "if ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns", "which the files should be saved in Returns ------- fnames: list list of", "title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None, False]:", "self.img.vars except: warn(\"The initialized QA4SMImg object has not been loaded. 'load_data' needs to", "out_dir.exists(): out_dir.mkdir() # make if not existing else: out_dir = self.img.filepath.parent # use", "count: bool statistics Returns ------- stats: str caption with summary stats \"\"\" #", "should be saved in Returns ------- fnames: list list of file names with", "name for df, Var in self._yield_values(metric=metric): if not Var.is_CI: # concat upper and", "= self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title = self.create_title(Var=Var, type='mapplot_tc') out_name", "\"set to 'True' in the initialization of the Image.\") def get_dir(self, out_dir:str) ->", "a metric tc: bool, default is False True if TC. Then, caption starts", "bool count: bool statistics Returns ------- stats: str caption with summary stats \"\"\"", ") # save. 
Below workaround to avoid same names if not out_name: save_name", "from pathlib import Path import seaborn as sns import pandas as pd from", "names[type] except KeyError as e: message = \"type '{}' is not in the", "sns import pandas as pd from qa4sm_reader.img import QA4SMImg import qa4sm_reader.globals as globals", ") -> list: \"\"\" Mapplot for all variables for a given metric in", "\\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns = [ci_Var.bound] bounds.append(ci_df)", "bool, optional. Default is True. all plotted images are saved to the output", "dict Dict of dataframes with the lower and upper confidence intervals shape: {\"upper\"/\"lower\":", "Then, caption starts with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe with variable", "\"\"\" Lookup table for plot titles Parameters ---------- type: str type of plot", "= values df = pd.concat(dfs) # values are all Nan or NaNf -", "= [], [] ci = [] # we take the last iterated value", "Plots a scatterplot for ISMN and a image plot for other input values.", "# use default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\"", "parameters of plot Parameters ---------- df: pd.DataFrame dataframe to plot type: str one", "Var: MetricVar variable for a metric type: str type of plot \"\"\" name", "of a metric. File is searched for variables for that metric. 
out_name: str", "values df = pd.concat(dfs) # values are all Nan or NaNf - not", "Returns ------- outname: pathlib.Path correct path of the file \"\"\" out_name = Path(out_name)", "out_dir.mkdir() # make if not existing else: out_dir = self.img.filepath.parent # use default", "variable for a metric type: str type of plot Returns ------- parts: list", "box (axis) caption Parameters ---------- ds: pd.Series data on which stats are found", "with all the extensions \"\"\" fnames = [] if isinstance(out_types, str): out_types =", "box_cap_ds = 'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in global", "ci=ci, **plotting_kwargs ) if not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or", "plotted else: continue if save_files: fnames.extend(fns) plt.close('all') if fnames: return fnames def plot_metric(", "right variable if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if", "create_title(self, Var, type:str) -> str: \"\"\" Create title of the plot Parameters ----------", "----- df: pd.DataFrame dataframe with variable values and caption name Var: QA4SMMetricVariable variable", "-> tuple: \"\"\" Get iterable with pandas dataframes for all variables of a", "create mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None,", "message = \"type '{}' is not in the lookup table\".format(type) warn(message) def create_title(self,", "# values are all Nan or NaNf - not plotted if np.isnan(df.to_numpy()).all(): continue", "Creates a boxplot for common and double metrics. Saves a figure and returns", "\"\"\" Plots values to a map, using the values as color. 
Plots a", "# changes if it's a common-type Var if Var.g == 0: box_cap_ds =", "ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var: # when we only need", "fnames.extend(fns) plt.close('all') if save_files: return fnames def mapplot_var( self, Var, out_name:str=None, out_types:str='png', save_files:bool=False,", "globals.watermark_pos not in [None, False]: make_watermark(fig, offset=offset) return fig, ax def _save_plot(self, out_name:str,", "fnames.append(fname.absolute()) return fnames def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) ->", "output directory plotting_kwargs: arguments for _boxplot_definition function Returns ------- fnames: list list of", "{:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return", "all datasets and combined into one plot. out_name: str name of output file", "tc=True): if not Var.is_CI: id, names = Var.metric_ds bounds = [] for ci_df,", "for a metric type: str type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var,", "The results object. out_dir : str, optional (default: None) Path to output generated", "fnames: return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple:", "-> list: \"\"\" Mapplot for all variables for a given metric in the", "def boxplot_basic( self, metric:str, out_name:str=None, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Creates", "self.img.filepath.parent # use default otherwise return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path:", "out_dir:str=None): \"\"\" Create box plots from results in a qa4sm output file. 
Parameters", "MetricVar variable for a metric type: str type of plot \"\"\" name =", "\"\"\" Metric = self.img.metrics[metric] if Metric.g == 0 or Metric.g == 2: fnames_bplot", "shape: {\"upper\"/\"lower\": [CIs]} xticks: list caption to each boxplot (or triplet thereof) offset:", "= self._boxplot_definition( metric=metric, df=values, type='boxplot_basic', ci=ci, **plotting_kwargs ) if not out_name: out_name =", "{}-{} ({}) as the reference', 'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({})", "count:bool=True) -> str: \"\"\" Create the metric part with stats of the box", "figwidth = globals.boxplot_width * (len(df.columns) + 1) figsize = [figwidth, globals.boxplot_height] fig, ax", "mds meta is needed \"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']]))", "df, Var in self._yield_values(metric=metric, tc=True): if not Var.is_CI: id, names = Var.metric_ds bounds", "of median, iqr and N to the box bottom. tc: bool, default is", "None, defaults to the current working directory. \"\"\" self.img = image self.out_dir =", "metric type: str type of plot Returns ------- parts: list list of parts", "the same dataset metric_tc, ci = {}, {} for df, Var in self._yield_values(metric=metric,", "= [] if isinstance(out_types, str): out_types = [out_types] for ext in out_types: fname", "Path to output generated plot. 
If None, defaults to the current working directory.", "---------- Var: MetricVar variable for a metric tc: bool, default is False True", "file names with all the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric", "'All datasets' else: box_cap_ds = self._box_caption(Var, tc=tc) # setting in global for caption", "box (axis) caption Parameters ---------- Var: MetricVar variable for a metric tc: bool,", "parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) if type == 'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod", "**kwargs ) -> tuple: \"\"\" Define parameters of plot Parameters ---------- df: pd.DataFrame", "for a metric tc: bool, default is False True if TC. Then, caption", "mapplot fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent", "all Nan or NaNf - not plotted if np.isnan(values.to_numpy()).all(): return None # create", "bounds = ci[id] else: bounds = ci # create plot fig, ax =", "parts = [ref_meta[0], ref_meta[1]['short_name']] if type == \"mapplot_tc\": # necessary to respect old", "metric. File is searched for variables for that metric. out_name: str name of", "output path if specified, otherwise same directory as the one storing the netCDF", "'{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield df, Var def", "same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title = self.create_title(Var, type=type)", "# values are all Nan or NaNf - not plotted if np.isnan(values.to_numpy()).all(): return", "corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n, Var in enumerate(Vars):", "a given metric in the loaded file. 
Parameters ---------- metric : str Name", "type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset = 0.06 # offset", "parts for title \"\"\" parts = [] ref, mds, other = [meta for", "\"\"\" Class to create image files of plots from the validation results in", "not plotted if np.isnan(df.to_numpy()).all(): continue # necessary if statement to prevent key error", "enumerate(Vars): values = Var.values[Var.varname] # changes if it's a common-type Var if Var.g", "variable values and caption name Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\"", "extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize", "warn(message) @staticmethod # todo: cange file names and convention in qa4sm def _filenames_lut(type:str)", "with stats of the box (axis) caption Parameters ---------- ds: pd.Series data on", "[globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts) # generate plot figwidth = globals.boxplot_width *", "Create the dataset part of the box (axis) caption Parameters ---------- Var: MetricVar", "type if not out_path.suffix: if out_type[0] != '.': out_type = '.' 
+ out_type", "file names with all the extensions \"\"\" fnames = [] # group Vars", "CIs relative to the same dataset metric_tc, ci = {}, {} for df,", "Default is False wether to save the file in the output directory plotting_kwargs:", "return plotting objects if save_files: fnames = self._save_plot(out_name, out_types=out_types) plt.close('all') return fnames else:", "bounds[\"lower\"] ci_range = diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI range:\" \"", "[] if med: met_str.append('Median: {:.3g}'.format(ds.median())) if iqr: met_str.append('IQR: {:.3g}'.format(iqr)) if count: met_str.append('N: {:d}'.format(ds.count()))", "list list of file names with all the extensions \"\"\" fnames = []", ") -> list: \"\"\" Creates a boxplot for common and double metrics. Saves", "'{} for {}-{} ({}) with {}-{} ({}) as the reference', 'mapplot_tc': '{} for", "ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is sutomatically", "str type of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta()", "of file names with all the extensions \"\"\" ref_meta, mds_meta, other_meta = Var.get_varmeta()", "images are saved to the output directory plotting_kwargs: arguments for mapplot function. \"\"\"", "CI range:\" \" {:.3g}\".format(ci_range) ] if id in ci.keys(): ci[id].append(bounds) else: ci[id] =", "which stats are found med: bool iqr: bool count: bool statistics Returns -------", "def get_dir(self, out_dir:str) -> Path: \"\"\"Use output path if specified, otherwise same directory", "it is kept; otherwise, it is saved as .png to self.out_dir Parameters ----------", "and {}-{} ({}) as the references'} try: return titles[type] except KeyError as e:", "variable for a metric type: str type of plot \"\"\" parts = [globals._metric_name[Var.metric]]", "a boxplot for TC metrics. 
Saves a figure and returns Matplotlib fig and", "list list of parts for title \"\"\" parts = [] ref, mds, other", "({}) as the references'} try: return titles[type] except KeyError as e: message =", "def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs ) -> list: \"\"\" Mapplot for", "out_name = Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name) # provide output", "other_meta = Var.get_varmeta() # fetch parts of the name for the variable if", "except KeyError as e: message = \"type '{}' is not in the lookup", "self.create_filename(Var, type='boxplot_tc') else: save_name = out_name # save or return plotting objects if", "qa4sm_reader.globals as globals from qa4sm_reader.plot_utils import * from warnings import warn class QA4SMPlotter():", "to the type of plot Parameters ---------- Var: MetricVar variable for a metric", "file extensions to be plotted. If None, uses 'png' Returns ------- outname: pathlib.Path", "fig, ax = mapplot(df=Var.values[Var.varname], metric=metric, ref_short=ref_meta[1]['short_name'], ref_grid_stepsize=ref_grid_stepsize, plot_extent=None, # if None, extent is", "self.mapplot_var(Var, out_name=None, out_types=out_types, save_files=save_files, **plotting_kwargs) # values are all Nan or NaNf -", "= 0.06 # offset smaller as CI variables have a larger caption if", "id, names = Var.metric_ds bounds = [] for ci_df, ci_Var in self._yield_values(metric=metric): #", "Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs:", "if fnames: return fnames def plot_metric( self, metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) ->", "metric:str, out_types:str='png', save_all:bool=True, **plotting_kwargs ) -> tuple: \"\"\" Plot and save boxplot and", "Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07, Var=None, **kwargs ) ->", "box_cap = 
'{}\\n{}'.format(box_cap_ds, box_stats) else: box_cap = box_cap_ds df = values.to_frame(box_cap) yield df,", "results in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\" Create box plots", "Path(out_name) # provide output directory out_path = self.out_dir.joinpath(out_name) # provide output file type", ") if not Var: # when we only need reference dataset from variables", "type: str type of plot \"\"\" parts = [globals._metric_name[Var.metric]] parts.extend(self._get_parts_name(Var=Var, type=type)) title =", "optional contains file extensions to be plotted. If None, uses 'png' Returns -------", "list: \"\"\" Create parts for title according to the type of plot Parameters", "dataframe with variable values and caption name Var: QA4SMMetricVariable variable corresponding to the", "'mapplot_tc': '{} for {}-{} ({}) with {}-{} ({}) and {}-{} ({}) as the", "fnames else: return fig, ax def mapplot_metric( self, metric:str, out_types:str='png', save_files:bool=False, **plotting_kwargs )", "of plot \"\"\" name = self._filenames_lut(type=type) ref_meta, mds_meta, other_meta = Var.get_varmeta() # fetch", "None Specified in case mds meta is needed \"\"\" # plot label parts", "upper and lower CI bounds of Variable, if present bounds = [] for", "tc=tc) # setting in global for caption stats if globals.boxplot_printnumbers: box_stats = self._box_stats(values)", "fnames : list List of files that were created \"\"\" fnames = []", "type: str one of _titles_lut ci: dict Dict of dataframes with the lower", "given metric in the loaded file. Parameters ---------- metric : str Name of", "arguments for mapplot function Returns ------- fnames : list List of files that", "from the file for all datasets and combined into one plot. 
out_name: str", "is the same): for Var in self.img._iter_vars(**{'metric':metric}): Var = Var break title =", "ci_Var.is_CI and \\ (ci_Var.metric_ds == Var.metric_ds) and \\ (ci_Var.other_dss == Var.other_dss): ci_df.columns =", "if specified if not out_dir.exists(): out_dir.mkdir() # make if not existing else: out_dir", "# add watermark if self.img.has_CIs: offset = 0.06 # offset smaller as CI", "from the validation results in a QA4SMImage \"\"\" def __init__(self, image, out_dir:str=None): \"\"\"", "'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}', 'mapplot_tc': 'overview_{}-{}_and_{}-{}_and_{}-{}_{}_for_{}-{}'} try: return names[type] except", "of plots from the validation results in a QA4SMImage \"\"\" def __init__(self, image,", "Var: QA4SMMetricVariable variable corresponding to the dataframe \"\"\" Vars = self.img._iter_vars(**{'metric':metric}) for n,", "Var.g == 0: offset = 0.02 # offset larger as common metrics have", "with pandas dataframes for all variables of a metric to plot Parameters ----------", "df=df, ci=ci, label=label, figsize=figsize, dpi=globals.dpi ) if not Var: # when we only", "{}-{} ({}) with {}-{} ({}) as the reference', 'mapplot_tc': '{} for {}-{} ({})", "not out_name: out_name = self.create_filename(Var, type='boxplot_basic') # save or return plotting objects if", "# save. 
Below workaround to avoid same names if not out_name: save_name =", "data on which stats are found med: bool iqr: bool count: bool statistics", "Var.metric]) name = name.format(*parts) return name def _yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\"", "Returns ------- stats: str caption with summary stats \"\"\" # interquartile range iqr", "id, meta = mds_meta if tc: id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'],", "to prevent key error when no CIs are in the netCDF if ci:", "KeyError as e: message = \"type '{}' is not in the lookup table\".format(type)", "for TC metrics. Saves a figure and returns Matplotlib fig and ax objects", "\"\"\" Lookup table for file names Parameters ---------- type: str type of plot", "is searched for variables for that metric. out_name: str name of output file", "\"\"\" Mapplot for all variables for a given metric in the loaded file.", "of the file \"\"\" out_name = Path(out_name) # provide output directory out_path =", "# use directory if specified if not out_dir.exists(): out_dir.mkdir() # make if not", "return out_dir def _standard_filename(self, out_name:str, out_type:str='png') -> Path: \"\"\" Standardized behaviour for filenames:", "out_types:str='png') -> list: \"\"\" Save plot with name to self.out_dir Parameters ---------- out_name:", "---------- out_name : str output filename (with or without extension) out_type : str,", "existing else: out_dir = self.img.filepath.parent # use default otherwise return out_dir def _standard_filename(self,", "file type if not out_path.suffix: if out_type[0] != '.': out_type = '.' +", "plot \"\"\" titles = {'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the", "{}-{} ({}) as the references'} try: return titles[type] except KeyError as e: message", "plot for other input values. 
Parameters ---------- var : QA4SMMetricVariab;e Var in the", "old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}', 'mapplot_double': 'overview_{}-{}_and_{}-{}_{}',", "of the box (axis) caption Parameters ---------- ds: pd.Series data on which stats", "for title according to the type of plot Parameters ---------- Var: MetricVar variable", "add_stats : bool, optional (default: from globals) Add stats of median, iqr and", "Var.get_varmeta()] if type == 'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic',", "table\".format(type) warn(message) def create_title(self, Var, type:str) -> str: \"\"\" Create title of the", "= \"{} between all datasets\".format(globals._metric_name[metric]) out_name = self.create_filename(Var, type='mapplot_common') elif Var.g == 2:", "Parameters ---------- Var: MetricVar variable for a metric tc: bool, default is False", "ci[id] else: bounds = ci # create plot fig, ax = self._boxplot_definition( metric=metric,", "if TC. 
Then, caption starts with \"Other Data:\" Yield ----- df: pd.DataFrame dataframe", "Data:\\n' + capt return capt @staticmethod def _get_parts_name(Var, type='boxplot_basic') -> list: \"\"\" Create", "Var.g == 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title", "'boxplot_basic': parts.append(ref[0]) parts.extend([ref[1]['pretty_name'], ref[1]['pretty_version']]) elif type in ['boxplot_tc', 'mapplot_basic', 'mapplot_tc']: parts.append(mds[0]) parts.extend([mds[1]['pretty_name'], mds[1]['pretty_version']])", "if Var.g == 0: offset = 0.02 # offset larger as common metrics", "self.out_dir Parameters ---------- out_name: str name of output file out_types: str or list", "stats: str caption with summary stats \"\"\" # interquartile range iqr = ds.quantile(q=[0.75,0.25]).diff()", "needed \"\"\" # plot label parts = [globals._metric_name[metric]] parts.append(globals._metric_description[metric].format( globals._metric_units[self.ref['short_name']])) label = \"{}{}\".format(*parts)", "metric_tc.keys(): metric_tc[id][0].append(df) else: metric_tc[id] = [df], Var for id, values in metric_tc.items(): dfs,", "return titles[type] except KeyError as e: message = \"type '{}' is not in", "= self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars except: warn(\"The initialized QA4SMImg object has", "If None, uses 'png' Returns ------- outname: pathlib.Path correct path of the file", "directory. \"\"\" self.img = image self.out_dir = self.get_dir(out_dir=out_dir) self.ref = image.datasets.ref try: self.img.vars", "ci=None, offset=0.07, Var=None, **kwargs ) -> tuple: \"\"\" Define parameters of plot Parameters", "else: save_name = out_name # save or return plotting objects if save_files: fns", "id, meta = other_meta ds_parts.append('{}-{}\\n({})'.format( id, meta['pretty_name'], meta['pretty_version'])) capt = '\\n and \\n'.join(ds_parts)", "in the loaded file. 
Parameters ---------- metric : str Name of a metric.", "TC. Then, caption starts with \"Other Data:\" Returns ------- capt: str box caption", "out_dir: out_dir = Path(out_dir) # use directory if specified if not out_dir.exists(): out_dir.mkdir()", "= out_name # save or return plotting objects if save_files: fns = self._save_plot(save_name,", "label=label, figsize=figsize, dpi=globals.dpi ) if not Var: # when we only need reference", "offset = 0.02 # offset larger as common metrics have a shorter caption", "ci_range = diff.mean() df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range)", "count: met_str.append('N: {:d}'.format(ds.count())) stats = '\\n'.join(met_str) return stats @staticmethod def _box_caption(Var, tc:bool=False) ->", "variables have a larger caption if Var.g == 0: offset = 0.02 #", "save boxplot and mapplot for a certain metric Parameters ---------- metric: str name", "if ci_Var.is_CI and (ci_Var.metric_ds == Var.metric_ds): ci_df.columns = [ci_Var.bound] bounds.append(ci_df) if bounds: #", "== 2: title = self.create_title(Var=Var, type='mapplot_basic') out_name = self.create_filename(Var, type='mapplot_double') else: title =", "3: fnames_bplot = self.boxplot_tc(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs) fnames_mapplot = self.mapplot_metric(metric=metric, out_types=out_types, save_files=save_all, **plotting_kwargs)", "-> list: \"\"\" Plots values to a map, using the values as color.", "Var in the image to make the map for. 
out_name: str name of", "= ci # create plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc',", "Var = Var break title = self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark", "_yield_values(self, metric:str, tc:bool=False) -> tuple: \"\"\" Get iterable with pandas dataframes for all", "the plot Parameters ---------- Var: MetricVar variable for a metric type: str type", "files should be saved in Returns ------- fnames: list list of file names", "caption to each boxplot (or triplet thereof) offset: float offset of boxplots Var:", "use title for plot, make watermark ax.set_title(title, pad=globals.title_pad) if globals.watermark_pos not in [None,", "# necessary to respect old naming convention for dss in Var.other_dss: parts.extend([dss[0], dss[1]['short_name']])", "= Var.get_varmeta() metric = Var.metric ref_grid_stepsize = self.img.ref_dataset_grid_stepsize # create mapplot fig, ax", "= self.create_title(Var, type=type) ax.set_title(title, pad=globals.title_pad) # add watermark if self.img.has_CIs: offset = 0.06", "file name for df, Var in self._yield_values(metric=metric): if not Var.is_CI: # concat upper", "plotted if np.isnan(values.to_numpy()).all(): return None # create plot fig, ax = self._boxplot_definition( metric=metric,", "Var: MetricVar variable for a metric type: str type of plot \"\"\" parts", "ci # create plot fig, ax = self._boxplot_definition( metric=metric, df=df, ci=bounds, type='boxplot_tc', Var=Var,", "as globals from qa4sm_reader.plot_utils import * from warnings import warn class QA4SMPlotter(): \"\"\"", "correct path of the file \"\"\" out_name = Path(out_name) # provide output directory", "stick to old naming convention names = {'boxplot_basic': 'boxplot_{}', 'mapplot_common': 'overview_{}', 'boxplot_tc': 'boxplot_{}_for_{}-{}',", "self._box_caption(Var, tc=tc) # setting in global for caption stats if globals.boxplot_printnumbers: box_stats =", "datasets and 
combined into one plot. out_name: str name of output file out_types:", "\"\"\" Create the dataset part of the box (axis) caption Parameters ---------- Var:", "list of file names with all the extensions \"\"\" fnames = [] #", "= values.to_frame(box_cap) yield df, Var def _boxplot_definition( self, metric:str, df:pd.DataFrame, type:str, ci=None, offset=0.07,", ") -> tuple: \"\"\" Plot and save boxplot and mapplot for a certain", "__init__(self, image, out_dir:str=None): \"\"\" Create box plots from results in a qa4sm output", "'mapplot_tc': parts.append(other[0]) parts.extend([other[1]['pretty_name'], other[1]['pretty_version']]) return parts @staticmethod def _titles_lut(type:str) -> str: \"\"\" Lookup", "sure they refer to the right variable if ci_Var.is_CI and \\ (ci_Var.metric_ds ==", "save_files=save_files, **plotting_kwargs) # values are all Nan or NaNf - not plotted else:", "Nan or NaNf - not plotted if np.isnan(values.to_numpy()).all(): return None # create plot", "# provide output directory out_path = self.out_dir.joinpath(out_name) # provide output file type if", "{'boxplot_basic': 'Intercomparison of \\n{} \\nwith {}-{} ({}) \\nas the reference', 'boxplot_tc': 'Intercomparison of", "\"\"\" Create the metric part with stats of the box (axis) caption Parameters", "= float(diff.mean()) df.columns = [ df.columns[0] + \"\\nMean CI range:\" \" {:.3g}\".format(ci_range) ]" ]